        public void Generate(
            TensorProxy tensorProxy, int batchSize, IEnumerable<Agent> agents)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);

            var memorySize = (int)tensorProxy.shape[tensorProxy.shape.Length - 1];
            var agentIndex = 0;

            foreach (var agent in agents)
            {
                var agentInfo = agent.Info;
                var memory    = agentInfo.memories;

                var offset = memorySize * m_MemoryIndex;

                if (memory == null)
                {
                    agentIndex++;
                    continue;
                }
                for (var j = 0; j < memorySize; j++)
                {
                    // Guard against short memory lists; j + offset indexes into this agent's memories.
                    if (j + offset >= memory.Count)
                    {
                        break;
                    }
                    tensorProxy.data[agentIndex, j] = memory[j + offset];
                }
                agentIndex++;
            }
        }
        /// <summary>
        /// Generates the Tensor inputs that are expected to be present in the Model.
        /// </summary>
        /// <param name="model">
        /// The Barracuda engine model for loading static parameters.
        /// </param>
        /// <returns>Read-only list of the TensorProxy inputs expected by the model.</returns>
        public static IReadOnlyList<TensorProxy> GetInputTensors(Model model)
        {
            var tensors = new List<TensorProxy>();

            if (model == null)
            {
                return tensors;
            }

            foreach (var input in model.inputs)
            {
                tensors.Add(new TensorProxy
                {
                    name      = input.name,
                    valueType = TensorProxy.TensorType.FloatingPoint,
                    data      = null,
                    shape     = input.shape.Select(i => (long)i).ToArray()
                });
            }

            foreach (var mem in model.memories)
            {
                tensors.Add(new TensorProxy
                {
                    name      = mem.input,
                    valueType = TensorProxy.TensorType.FloatingPoint,
                    data      = null,
                    shape     = TensorUtils.TensorShapeFromBarracuda(mem.shape)
                });
            }

            tensors.Sort((el1, el2) => el1.name.CompareTo(el2.name));

            return tensors;
        }
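
For context, here is a minimal sketch of how the returned input list might be wired to the Generate implementations shown on this page. The ITensorInputGenerator interface, the generators dictionary, and the GenerateInputs method are illustrative assumptions, not part of the snippets above; only the Generate and GetInputTensors signatures they call are taken from this page, and the interface mirrors just the IEnumerable<Agent> variant.

        // Assumed generator contract, mirroring the Generate(TensorProxy, int, IEnumerable<Agent>)
        // signatures shown in the examples on this page.
        interface ITensorInputGenerator
        {
            void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<Agent> agents);
        }

        // Hypothetical wiring: look up a generator per expected input tensor (keyed by tensor
        // name) and let it resize the proxy and write one row per agent before inference.
        static void GenerateInputs(
            Model model, int batchSize, IEnumerable<Agent> agents,
            Dictionary<string, ITensorInputGenerator> generators)
        {
            foreach (var tensorProxy in GetInputTensors(model))
            {
                ITensorInputGenerator generator;
                if (generators.TryGetValue(tensorProxy.name, out generator))
                {
                    generator.Generate(tensorProxy, batchSize, agents);
                }
            }
        }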
Example #3

        public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<Agent> agents)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
            var vecObsSizeT = tensorProxy.shape[tensorProxy.shape.Length - 1];
            var agentIndex  = 0;

            foreach (var agent in agents)
            {
                var tensorOffset = 0;
                // Write each sensor consecutively to the tensor
                foreach (var sensorIndex in m_SensorIndices)
                {
                    m_WriteAdapter.SetTarget(tensorProxy, agentIndex, tensorOffset);
                    var sensor     = agent.sensors[sensorIndex];
                    var numWritten = sensor.Write(m_WriteAdapter);
                    tensorOffset += numWritten;
                }
                Debug.AssertFormat(
                    tensorOffset == vecObsSizeT,
                    "mismatch between vector observation size ({0}) and number of observations written ({1})",
                    vecObsSizeT, tensorOffset
                    );

                agentIndex++;
            }
        }
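
The assert above depends on each sensor's Write returning the number of floats it copied into the adapter. Below is a hypothetical sensor sketch that illustrates this contract; it is not the actual ML-Agents ISensor interface, and it assumes WriteAdapter exposes a float indexer for the target configured by SetTarget.

        // Hypothetical sensor: writes a three-float velocity observation and reports the count,
        // so the caller can advance tensorOffset by exactly the number of values written.
        class VelocitySensorSketch
        {
            readonly Vector3 m_Velocity;

            public VelocitySensorSketch(Vector3 velocity)
            {
                m_Velocity = velocity;
            }

            public int Write(WriteAdapter adapter)
            {
                adapter[0] = m_Velocity.x;
                adapter[1] = m_Velocity.y;
                adapter[2] = m_Velocity.z;
                return 3;
            }
        }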
Example #4

        public void Generate(
            TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);

            var memorySize = tensorProxy.shape[tensorProxy.shape.Length - 1];
            var agentIndex = 0;

            foreach (var agent in agentInfo.Keys)
            {
                var memory = agentInfo[agent].memories;
                if (memory == null)
                {
                    agentIndex++;
                    continue;
                }
                for (var j = 0; j < Math.Min(memorySize, memory.Count); j++)
                {
                    tensorProxy.data[agentIndex, j] = memory[j];
                }
                agentIndex++;
            }
        }
Example #5

        public void Generate(TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, _allocator);

            var memorySize = (int)tensorProxy.Shape[tensorProxy.Shape.Length - 1];
            var agentIndex = 0;

            foreach (var agent in agentInfo.Keys)
            {
                var memory = agentInfo[agent].memories;

                int offset = memorySize * memoryIndex;

                if (memory == null)
                {
                    agentIndex++;
                    continue;
                }
                for (var j = 0; j < memorySize; j++)
                {
                    // Guard against short memory lists; j + offset indexes into this agent's memories.
                    if (j + offset >= memory.Count)
                    {
                        break;
                    }
                    tensorProxy.Data[agentIndex, j] = memory[j + offset];
                }
                agentIndex++;
            }
        }
Example #6

        public void Generate(TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
        {
            var textures = agentInfo.Keys.Select(
                agent => agentInfo[agent].visualObservations[_index]).ToList();

            TensorUtils.ResizeTensor(tensorProxy, batchSize, _allocator);
            Utilities.TextureToTensorProxy(tensorProxy, textures, _grayScale, _allocator);
        }
Example #7

        List<TensorProxy> FetchBarracudaOutputs(string[] names)
        {
            var outputs = new List<TensorProxy>();

            foreach (var n in names)
            {
                var output = m_Engine.PeekOutput(n);
                outputs.Add(TensorUtils.TensorProxyFromBarracuda(output, n));
            }

            return outputs;
        }
Example #8

        public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<Agent> agents)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
            var agentIndex = 0;

            foreach (var agent in agents)
            {
                m_WriteAdapter.SetTarget(tensorProxy, agentIndex, 0);
                agent.sensors[m_SensorIndex].Write(m_WriteAdapter);
                agentIndex++;
            }
        }
Example #9

        public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
            var agentIndex = 0;

            foreach (var infoSensorPair in infos)
            {
                var sensor = infoSensorPair.sensors[m_SensorIndex];
                m_WriteAdapter.SetTarget(tensorProxy, agentIndex, 0);
                sensor.Write(m_WriteAdapter);
                agentIndex++;
            }
        }
        public void Generate(
            TensorProxy tensorProxy, int batchSize, IEnumerable<Agent> agents)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
            var agentIndex = 0;

            foreach (var agent in agents)
            {
                // TODO direct access to sensors list here - should we do it differently?
                // TODO m_Index here is the visual observation index. Will work for now but not if we add more sensor types.
                agent.m_Sensors[m_Index].WriteToTensor(tensorProxy, agentIndex);
                agentIndex++;
            }
        }
Example #11

        public void Generate(TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, _allocator);
            var vecObsSizeT = tensorProxy.Shape[tensorProxy.Shape.Length - 1];

            var agentIndex = 0;

            foreach (var agent in agentInfo.Keys)
            {
                var vectorObs = agentInfo[agent].stackedVectorObservation;
                for (var j = 0; j < vecObsSizeT; j++)
                {
                    tensorProxy.Data[agentIndex, j] = vectorObs[j];
                }
                agentIndex++;
            }
        }
Example #12

        public void Generate(TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, _allocator);

            var maskSize   = tensorProxy.Shape[tensorProxy.Shape.Length - 1];
            var agentIndex = 0;

            foreach (var agent in agentInfo.Keys)
            {
                var maskList = agentInfo[agent].actionMasks;
                for (var j = 0; j < maskSize; j++)
                {
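                    // actionMasks marks a masked (disallowed) branch with true, so write 0 there and 1 for actions the model may sample.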
                    var isUnmasked = (maskList != null && maskList[j]) ? 0.0f : 1.0f;
                    tensorProxy.Data[agentIndex, j] = isUnmasked;
                }
                agentIndex++;
            }
        }
Example #13

        public void Generate(TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, _allocator);

            var actionSize = tensorProxy.Shape[tensorProxy.Shape.Length - 1];
            var agentIndex = 0;

            foreach (var agent in agentInfo.Keys)
            {
                var pastAction = agentInfo[agent].storedVectorActions;
                for (var j = 0; j < actionSize; j++)
                {
                    tensorProxy.Data[agentIndex, j] = pastAction[j];
                }

                agentIndex++;
            }
        }
        public void Generate(
            TensorProxy tensorProxy, int batchSize, IEnumerable<Agent> agents)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
            var vecObsSizeT = tensorProxy.shape[tensorProxy.shape.Length - 1];
            var agentIndex  = 0;

            foreach (var agent in agents)
            {
                var info      = agent.Info;
                var vectorObs = info.stackedVectorObservation;
                for (var j = 0; j < vecObsSizeT; j++)
                {
                    tensorProxy.data[agentIndex, j] = vectorObs[j];
                }
                agentIndex++;
            }
        }
Example #15

        public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);

            var actionSize = tensorProxy.shape[tensorProxy.shape.Length - 1];
            var agentIndex = 0;

            foreach (var infoSensorPair in infos)
            {
                var info       = infoSensorPair.agentInfo;
                var pastAction = info.storedVectorActions;
                for (var j = 0; j < actionSize; j++)
                {
                    tensorProxy.data[agentIndex, j] = pastAction[j];
                }

                agentIndex++;
            }
        }
Example #16

        public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);

            var maskSize   = tensorProxy.shape[tensorProxy.shape.Length - 1];
            var agentIndex = 0;

            foreach (var infoSensorPair in infos)
            {
                var agentInfo = infoSensorPair.agentInfo;
                var maskList  = agentInfo.actionMasks;
                for (var j = 0; j < maskSize; j++)
                {
                    var isUnmasked = (maskList != null && maskList[j]) ? 0.0f : 1.0f;
                    tensorProxy.data[agentIndex, j] = isUnmasked;
                }
                agentIndex++;
            }
        }
Example #17

        public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
            var agentIndex = 0;

            foreach (var infoSensorPair in infos)
            {
                var sensor = infoSensorPair.sensors[m_SensorIndex];
                if (infoSensorPair.agentInfo.done)
                {
                    // If the agent is done, we might have a stale reference to the sensors
                    // e.g. a dependent object might have been disposed.
                    // To avoid this, just fill observation with zeroes instead of calling sensor.Write.
                    TensorUtils.FillTensorBatch(tensorProxy, agentIndex, 0.0f);
                }
                else
                {
                    m_WriteAdapter.SetTarget(tensorProxy, agentIndex, 0);
                    sensor.Write(m_WriteAdapter);
                }
                agentIndex++;
            }
        }
Example #18

        public void Generate(
            TensorProxy tensorProxy, int batchSize, IEnumerable<Agent> agents)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);

            var memorySize = tensorProxy.shape[tensorProxy.shape.Length - 1];
            var agentIndex = 0;

            foreach (var agent in agents)
            {
                var info = agent.Info;
                List<float> memory;

                if (info.done)
                {
                    m_Memories.Remove(info.id);
                }
                if (!m_Memories.TryGetValue(info.id, out memory))
                {
                    for (var j = 0; j < memorySize; j++)
                    {
                        tensorProxy.data[agentIndex, j] = 0;
                    }
                    agentIndex++;
                    continue;
                }
                for (var j = 0; j < Math.Min(memorySize, memory.Count); j++)
                {
                    tensorProxy.data[agentIndex, j] = memory[j];
                }
                agentIndex++;
            }
        }
Example #19

        public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);

            var memorySize = (int)tensorProxy.shape[tensorProxy.shape.Length - 1];
            var agentIndex = 0;

            foreach (var infoSensorPair in infos)
            {
                var info = infoSensorPair.agentInfo;
                var offset = memorySize * m_MemoryIndex;
                List<float> memory;
                if (info.done)
                {
                    m_Memories.Remove(info.episodeId);
                }
                if (!m_Memories.TryGetValue(info.episodeId, out memory))
                {
                    for (var j = 0; j < memorySize; j++)
                    {
                        tensorProxy.data[agentIndex, j] = 0;
                    }
                    agentIndex++;
                    continue;
                }
                for (var j = 0; j < memorySize; j++)
                {
                    // Guard against short memory lists; j + offset indexes into this agent's memories.
                    if (j + offset >= memory.Count)
                    {
                        break;
                    }

                    tensorProxy.data[agentIndex, j] = memory[j + offset];
                }
                agentIndex++;
            }
        }
Example #20

        public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
            var vecObsSizeT = tensorProxy.shape[tensorProxy.shape.Length - 1];
            var agentIndex  = 0;

            foreach (var info in infos)
            {
                if (info.agentInfo.done)
                {
                    // If the agent is done, we might have a stale reference to the sensors
                    // e.g. a dependent object might have been disposed.
                    // To avoid this, just fill observation with zeroes instead of calling sensor.Write.
                    TensorUtils.FillTensorBatch(tensorProxy, agentIndex, 0.0f);
                }
                else
                {
                    var tensorOffset = 0;
                    // Write each sensor consecutively to the tensor
                    foreach (var sensorIndex in m_SensorIndices)
                    {
                        var sensor = info.sensors[sensorIndex];
                        m_WriteAdapter.SetTarget(tensorProxy, agentIndex, tensorOffset);
                        var numWritten = sensor.Write(m_WriteAdapter);
                        tensorOffset += numWritten;
                    }
                    Debug.AssertFormat(
                        tensorOffset == vecObsSizeT,
                        "mismatch between vector observation size ({0}) and number of observations written ({1})",
                        vecObsSizeT, tensorOffset
                        );
                }

                agentIndex++;
            }
        }
        /// <summary>
        /// Generates the Tensor inputs that are expected to be present in the Model.
        /// </summary>
        /// <returns>Read-only list of the TensorProxy inputs expected by the model.</returns>
        public IReadOnlyList<TensorProxy> GetInputTensors()
        {
            var tensors = new List<TensorProxy>();

            if (_model == null)
            {
                return tensors;
            }

            foreach (var input in _model.inputs)
            {
                tensors.Add(new TensorProxy
                {
                    Name      = input.name,
                    ValueType = TensorProxy.TensorType.FloatingPoint,
                    Data      = null,
                    Shape     = input.shape.Select(i => (long)i).ToArray()
                });
            }

            foreach (var mem in _model.memories)
            {
                //Debug.Log($"{mem.input}: {mem.shape} -> {BarracudaUtils.TensorShapeFromBarracuda(mem.shape).Length}");
                tensors.Add(new TensorProxy
                {
                    Name      = mem.input,
                    ValueType = TensorProxy.TensorType.FloatingPoint,
                    Data      = null,
                    Shape     = TensorUtils.TensorShapeFromBarracuda(mem.shape)
                });
            }

            tensors.Sort((el1, el2) => el1.Name.CompareTo(el2.Name));

            return tensors;
        }
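
As a hedged follow-up sketch: the memory inputs added above normally have matching outputs on the model, so after a run the recurrent state can be read back and stored for the next step. This assumes Barracuda's Model.Memory also carries an output name, and it reuses FetchBarracudaOutputs from the earlier example even though that method lives on a different class in these snippets.

        // Hypothetical follow-up after executing the model: peek each memory output by name so
        // it can be written back into the store consumed by the recurrent input generators above.
        List<TensorProxy> FetchMemoryOutputs()
        {
            var memoryOutputNames = _model.memories.Select(mem => mem.output).ToArray();
            return FetchBarracudaOutputs(memoryOutputNames);
        }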
Example #22

 public void Generate(TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
 {
     TensorUtils.ResizeTensor(tensorProxy, batchSize, _allocator);
     _randomNormal.FillTensor(tensorProxy);
 }
Example #23

 public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
 {
     TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
     TensorUtils.FillTensorWithRandomNormal(tensorProxy, m_RandomNormal);
 }
Example #24

 public void Generate(TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
 {
     TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
 }
Example #25

 public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
 {
     TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
 }
Example #26

 public void Generate(
     TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
 {
     TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
     TensorUtils.FillTensorWithRandomNormal(tensorProxy, m_RandomNormal);
 }
Example #27

        public int ExecuteGraph(IEnumerable<TensorProxy> inputs_it, IEnumerable<TensorProxy> outputs_it)
        {
            Profiler.BeginSample("TFSharpInferenceComponent.ExecuteGraph");
            TensorProxy[] inputs  = inputs_it.ToArray();
            TensorProxy[] outputs = outputs_it.ToArray();

            // TODO: Can/should we pre-allocate that?
            TFSession.Runner runner = m_session.GetRunner();

            inputs.ToList().ForEach((TensorProxy input) =>
            {
                if (input.Shape.Length == 0)
                {
                    var data = input.Data[0];
                    if (input.DataType == typeof(int))
                    {
                        runner.AddInput(m_graph[input.Name][0], (int)data);
                    }
                    else
                    {
                        runner.AddInput(m_graph[input.Name][0], (float)data);
                    }
                }
                else
                {
                    runner.AddInput(m_graph[input.Name][0], input.DataType == typeof(int) ?
                                    TensorUtils.BarracudaToIntArray(input.Data) :
                                    TensorUtils.BarracudaToFloatArray(input.Data));
                }
            });

            // TODO: better way to pre-allocate this?
            outputs.ToList().ForEach(s => runner.Fetch(s.Name));

            TFStatus status = new TFStatus();

            Profiler.BeginSample("TFSharpInferenceComponent.ExecuteGraph.RunnerRun");
            var out_tensors = runner.Run(status);

            Profiler.EndSample();

            if (!status.Ok)
            {
                Debug.LogError(status.StatusMessage);
                return -1;
            }

            Debug.Assert(outputs.Length == out_tensors.Length);

            for (var i = 0; i < outputs.Length; ++i)
            {
                if (outputs[i].Shape.Length == 0)
                {
                    // Handle scalars
                    outputs[i].Data    = new Tensor(1, 1);
                    outputs[i].Data[0] = (float)(int)out_tensors[i].GetValue();
                }
                else
                {
                    outputs[i].Data = TensorUtils.ArrayToBarracuda(out_tensors[i].GetValue() as Array);
                }
            }

            Profiler.EndSample();
            // TODO: create error codes
            return 0;
        }
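
Finally, a hedged usage sketch for ExecuteGraph: the output name "action", its shape, and the batchSize/actionSize variables are assumptions for illustration; only the ExecuteGraph and GetInputTensors signatures are taken from the snippets above.

        // Hypothetical call site: fill the expected inputs, pre-declare the outputs to fetch
        // (Name and Shape must be set, since ExecuteGraph fetches by Name and branches on
        // Shape.Length), then run and check the error code.
        var inputs = GetInputTensors();
        // ... fill each input's Data via the Generate(...) implementations shown above ...
        var outputs = new List<TensorProxy>
        {
            new TensorProxy
            {
                Name  = "action",                            // assumed output name
                Shape = new long[] { batchSize, actionSize } // assumed output shape
            }
        };
        if (ExecuteGraph(inputs, outputs) != 0)
        {
            Debug.LogError("Inference failed; see the status message logged by ExecuteGraph.");
        }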