Example #1
        // Writes the model's memory output back into each agent's stored memory list.
        public void Apply(TensorProxy tensorProxy, IEnumerable<int> actionIds, Dictionary<int, float[]> lastActions)
        {
            var agentIndex = 0;
            var memorySize = (int)tensorProxy.shape[tensorProxy.shape.Length - 1];

            foreach (int agentId in actionIds)
            {
                List<float> memory = null;
                if (!m_Memories.TryGetValue(agentId, out memory) ||
                    memory.Count < memorySize * m_MemoriesCount)
                {
                    memory = new List<float>();
                    memory.AddRange(Enumerable.Repeat(0f, memorySize * m_MemoriesCount));
                }

                for (var j = 0; j < memorySize; j++)
                {
                    memory[memorySize * m_MemoryIndex + j] = tensorProxy.data[agentIndex, j];
                }

                m_Memories[agentId] = memory;
                agentIndex++;
            }
        }
Example #2
        /// <summary>
        /// Queries the InferenceEngine for the value of a variable in the graph given its name.
        /// Only works with scalar (zero-dimensional) int32 Tensors containing a single element.
        /// If the node was not found or could not be retrieved, the value -1 will be returned.
        /// </summary>
        /// <param name="name">The name of the Tensor variable</param>
        /// <returns>The value of the scalar variable in the model. (-1 if not found)</returns>
        private int GetIntScalar(string name)
        {
            var outputs = new TensorProxy[]
            {
                new TensorProxy()
                {
                    Name      = name,
                    ValueType = TensorProxy.TensorType.Integer,
                    Shape     = new long[] { },
                    Data      = new Tensor(1, 1)
                },
            };

            try
            {
                _engine.ExecuteGraph(new TensorProxy[0], outputs);
            }
            catch (Exception ex)
            {
                UnityEngine.Debug.LogError($"Failed to execute GetIntScalar()\n{ex}");
                return -1;
            }
            return (int)outputs[0].Data[0];
        }
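
A minimal usage sketch for the method above; the graph node names "memory_size" and "version_number" are illustrative assumptions, not names taken from this example.

        // Hypothetical caller; the scalar node names are assumptions for illustration only.
        private void ReadModelMetadata()
        {
            var memorySize = GetIntScalar("memory_size");
            var modelVersion = GetIntScalar("version_number");
            if (memorySize == -1 || modelVersion == -1)
            {
                UnityEngine.Debug.LogWarning("Could not read model metadata scalars from the graph.");
            }
        }
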
Example #3
        // Fills the recurrent memory input tensor from each agent's stored memories (zeros when none exist).
        public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<Agent> agents)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);

            var memorySize = (int)tensorProxy.shape[tensorProxy.shape.Length - 1];
            var agentIndex = 0;

            foreach (var agent in agents)
            {
                var offset = memorySize * m_MemoryIndex;
                List<float> memory;
                if (agent.Info.done)
                {
                    m_Memories.Remove(agent.Info.id);
                }
                if (!m_Memories.TryGetValue(agent.Info.id, out memory))
                {
                    for (var j = 0; j < memorySize; j++)
                    {
                        tensorProxy.data[agentIndex, j] = 0;
                    }
                    agentIndex++;
                    continue;
                }
                for (var j = 0; j < memorySize; j++)
                {
                    if (j >= memory.Count)
                    {
                        break;
                    }

                    tensorProxy.data[agentIndex, j] = memory[j + offset];
                }
                agentIndex++;
            }
        }
Example #4
        public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
            var vecObsSizeT = tensorProxy.shape[tensorProxy.shape.Length - 1];
            var agentIndex  = 0;

            foreach (var info in infos)
            {
                if (info.agentInfo.done)
                {
                    // If the agent is done, we might have a stale reference to the sensors
                    // e.g. a dependent object might have been disposed.
                    // To avoid this, just fill observation with zeroes instead of calling sensor.Write.
                    TensorUtils.FillTensorBatch(tensorProxy, agentIndex, 0.0f);
                }
                else
                {
                    var tensorOffset = 0;
                    // Write each sensor consecutively to the tensor
                    foreach (var sensorIndex in m_SensorIndices)
                    {
                        var sensor = info.sensors[sensorIndex];
                        m_WriteAdapter.SetTarget(tensorProxy, agentIndex, tensorOffset);
                        var numWritten = sensor.Write(m_WriteAdapter);
                        tensorOffset += numWritten;
                    }
                    Debug.AssertFormat(
                        tensorOffset == vecObsSizeT,
                        "mismatch between vector observation size ({0}) and number of observations written ({1})",
                        vecObsSizeT, tensorOffset
                        );
                }

                agentIndex++;
            }
        }
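
For context, a minimal sketch of the sensor side of the loop above, assuming the sensor.Write(WriteAdapter) contract implied by the call site (the sensor copies its observations into the adapter and returns the number of floats written) and assuming WriteAdapter exposes a float indexer; m_Observations is an illustrative field, not part of the example.

        // Illustrative sensor Write implementation (assumed contract, not from the example):
        // copy a cached observation vector into the adapter and report how many values were written.
        public int Write(WriteAdapter adapter)
        {
            for (var i = 0; i < m_Observations.Length; i++)
            {
                adapter[i] = m_Observations[i];
            }
            return m_Observations.Length;
        }
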
Example #5
 public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
 {
     tensorProxy.data?.Dispose();
     tensorProxy.data    = m_Allocator.Alloc(new TensorShape(1, 1));
     tensorProxy.data[0] = batchSize;
 }
Example #6
 public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
 {
     TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
     TensorUtils.FillTensorWithRandomNormal(tensorProxy, m_RandomNormal);
 }
Example #7
 public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
 {
     TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
 }
Example #8
        // Samples one discrete action per branch from the distribution in tensorProxy
        // and writes the chosen actions into lastActions for each agent.
        public void Apply(TensorProxy tensorProxy, IEnumerable<int> actionIds, Dictionary<int, float[]> lastActions)
        {
            //var tensorDataProbabilities = tensorProxy.Data as float[,];
            var idActionPairList   = actionIds as List<int> ?? actionIds.ToList();
            var batchSize          = idActionPairList.Count;
            var actionValues       = new float[batchSize, m_ActionSize.Length];
            var startActionIndices = Utilities.CumSum(m_ActionSize);

            for (var actionIndex = 0; actionIndex < m_ActionSize.Length; actionIndex++)
            {
                var nBranchAction = m_ActionSize[actionIndex];
                var actionProbs   = new TensorProxy()
                {
                    valueType = TensorProxy.TensorType.FloatingPoint,
                    shape     = new long[] { batchSize, nBranchAction },
                    data      = m_Allocator.Alloc(new TensorShape(batchSize, nBranchAction))
                };

                for (var batchIndex = 0; batchIndex < batchSize; batchIndex++)
                {
                    for (var branchActionIndex = 0;
                         branchActionIndex < nBranchAction;
                         branchActionIndex++)
                    {
                        actionProbs.data[batchIndex, branchActionIndex] =
                            tensorProxy.data[batchIndex, startActionIndices[actionIndex] + branchActionIndex];
                    }
                }

                var outputTensor = new TensorProxy()
                {
                    valueType = TensorProxy.TensorType.FloatingPoint,
                    shape     = new long[] { batchSize, 1 },
                    data      = m_Allocator.Alloc(new TensorShape(batchSize, 1))
                };

                Eval(actionProbs, outputTensor, m_Multinomial);

                for (var ii = 0; ii < batchSize; ii++)
                {
                    actionValues[ii, actionIndex] = outputTensor.data[ii, 0];
                }
                actionProbs.data.Dispose();
                outputTensor.data.Dispose();
            }
            var agentIndex = 0;

            foreach (int agentId in actionIds)
            {
                if (lastActions.ContainsKey(agentId))
                {
                    var actionVal = lastActions[agentId];
                    if (actionVal == null)
                    {
                        actionVal            = new float[m_ActionSize.Length];
                        lastActions[agentId] = actionVal;
                    }
                    for (var j = 0; j < m_ActionSize.Length; j++)
                    {
                        actionVal[j] = actionValues[agentIndex, j];
                    }
                }
                agentIndex++;
            }
        }
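
The startActionIndices array above comes from Utilities.CumSum. Below is a minimal sketch of the behavior that the indexing assumes (a leading zero followed by running totals of the branch sizes); it is an assumption for illustration, not the actual Utilities implementation.

        // Sketch of a cumulative-sum helper consistent with how startActionIndices is used above:
        // result[0] == 0 and result[i] is the starting offset of branch i in the flattened output.
        public static int[] CumSum(int[] input)
        {
            var result = new int[input.Length + 1];
            for (var i = 0; i < input.Length; i++)
            {
                result[i + 1] = result[i] + input[i];
            }
            return result;
        }
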
Example #9
        // Builds a TensorProxy that describes the shape and data type of a TensorFlow graph operation.
        private TensorProxy GetOpMetadata(TFOperation op)
        {
            TFStatus status = new TFStatus();

            // Query the shape
            long[] shape      = null;
            var    shape_attr = op.GetAttributeMetadata("shape", status);

            if (!status.Ok || shape_attr.TotalSize <= 0)
            {
                Debug.LogWarning($"Operation {op.Name} does not contain shape attribute or it" +
                                 $" doesn't contain valid shape data! Status: {status.StatusMessage}");
            }
            else
            {
                if (shape_attr.IsList)
                {
                    throw new NotImplementedException("Querying lists is not implemented yet!");
                }
                else
                {
                    TFStatus s    = new TFStatus();
                    long[]   dims = new long[shape_attr.TotalSize];
                    TF_OperationGetAttrShape(op.Handle, "shape", dims, (int)shape_attr.TotalSize,
                                             s.Handle);
                    if (!s.Ok)
                    {
                        throw new FormatException("Could not query model for op shape (" + op.Name + ")");
                    }
                    else
                    {
                        shape = new long[dims.Length];
                        for (int i = 0; i < shape_attr.TotalSize; ++i)
                        {
                            if (dims[i] == -1)
                            {
                                // we have to use batchsize 1
                                shape[i] = 1;
                            }
                            else
                            {
                                shape[i] = dims[i];
                            }
                        }
                    }
                }
            }

            // Query the data type
            TFDataType type_value = new TFDataType();

            unsafe
            {
                TFStatus s = new TFStatus();
                TF_OperationGetAttrType(op.Handle, "dtype", &type_value, s.Handle);
                if (!s.Ok)
                {
                    Debug.LogWarning("Operation " + op.Name +
                                     ": error retrieving dtype, assuming float!");
                    type_value = TFDataType.Float;
                }
            }

            TensorProxy.TensorType placeholder_type = TensorProxy.TensorType.FloatingPoint;
            switch (type_value)
            {
            case TFDataType.Float:
                placeholder_type = TensorProxy.TensorType.FloatingPoint;
                break;

            case TFDataType.Int32:
                placeholder_type = TensorProxy.TensorType.Integer;
                break;

            default:
                Debug.LogWarning("Operation " + op.Name +
                                 " is not a float/integer. Proceed at your own risk!");
                break;
            }

            TensorProxy t = new TensorProxy
            {
                Data      = null,
                Name      = op.Name,
                Shape     = shape,
                ValueType = placeholder_type
            };

            return t;
        }
Example #10
 public void Generate(TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
 {
     tensorProxy.data?.Dispose();
     tensorProxy.data    = m_Allocator.Alloc(new TensorShape(1, 1));
     tensorProxy.data[0] = batchSize;
 }
Example #11
 public void Generate(
     TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
 {
     TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
     TensorUtils.FillTensorWithRandomNormal(tensorProxy, m_RandomNormal);
 }
Example #12
 public void Generate(TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
 {
     TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
 }
Example #13
 public void Generate(TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
 {
     TensorUtils.ResizeTensor(tensorProxy, batchSize, _allocator);
     _randomNormal.FillTensor(tensorProxy);
 }