Example #1
        public static void Main(string [] args)
        {
            TensorFlowSharp.Windows.NativeBinding.Init(true);

            Console.WriteLine("Preparing");
            Console.WriteLine("TF Version: " + TFCore.Version);
            var files = options.Parse(args);

            if (dir == null)
            {
                dir = Environment.CurrentDirectory;
            }

            if (files.Count == 0)
            {
                files = new List <string>();
                string[] fis = Directory.GetFiles(Path.Combine(Environment.CurrentDirectory, "datas"));
                foreach (string name in fis)
                {
                    string lower = name.ToLower();
                    if (lower.EndsWith("jpg") || lower.EndsWith("png"))
                    {
                        files.Add(name);
                    }
                }
                if (files.Count == 0)
                {
                    Error("no input files");
                }
            }

            ModelFiles(dir);

            // Construct an in-memory graph from the serialized form.
            var graph = new TFGraph();
            // Load the serialized GraphDef from a file.
            var model = File.ReadAllBytes(modelFile);

            graph.Import(model, "");
            using (var session = new TFSession(graph))
            {
                Console.WriteLine("model start");
                Stopwatch sw = new Stopwatch();
                sw.Start();

                var labels = File.ReadAllLines(labelsFile);

                foreach (var file in files)
                {
                    // Run inference on the image files
                    // For multiple images, session.Run() can be called in a loop (and
                    // concurrently). Alternatively, images can be batched since the model
                    // accepts batches of image data as input.
                    var tensor = CreateTensorFromImageFile(file);

                    long startRun = sw.ElapsedMilliseconds;

                    var runner = session.GetRunner();
                    runner.AddInput(graph["input"][0], tensor).Fetch(graph["output"][0]);
                    var output = runner.Run();

                    Console.WriteLine($"Runner Takes {sw.ElapsedMilliseconds - startRun}ms");
                    // output[0].Value() is a vector containing probabilities of
                    // labels for each image in the "batch". The batch size was 1.
                    // Find the most probable label index.

                    var result = output[0];
                    var rshape = result.Shape;
                    foreach (long l in rshape)
                    {
                        Console.Write($"{l} ");
                    }
                    Console.WriteLine("");
                    if (result.NumDims != 2 || rshape[0] != 1)
                    {
                        var shape = "";
                        foreach (var d in rshape)
                        {
                            shape += $"{d} ";
                        }
                        shape = shape.Trim();
                        Console.WriteLine($"Error: expected to produce a [1 N] shaped tensor where N is the number of labels, instead it produced one with shape [{shape}]");
                        Environment.Exit(1);
                    }

                    // You can get the data in two ways: as a multi-dimensional array or as arrays of arrays.
                    // One can be nicer to read than the other; pick based on how you want to process it.
                    bool jagged = true;

                    int   bestIdx = 0;
                    float best    = 0;

                    if (jagged)
                    {
                        var probabilities = ((float[][])result.GetValue(jagged: true))[0];
                        for (int i = 0; i < probabilities.Length; i++)
                        {
                            if (probabilities[i] > best)
                            {
                                bestIdx = i;
                                best    = probabilities[i];
                            }
                        }
                    }
                    else
                    {
                        var val = (float[, ])result.GetValue(jagged: false);

                        // Result is [1,N], flatten array
                        for (int i = 0; i < val.GetLength(1); i++)
                        {
                            if (val[0, i] > best)
                            {
                                bestIdx = i;
                                best    = val[0, i];
                            }
                        }
                    }

                    Console.WriteLine($"{Path.GetFileName(file).PadRight(20)} best match: [{bestIdx.ToString().PadRight(3)}] {(best * 100.0).ToString("0.00").PadRight(6)}%   {labels[bestIdx]}");
                }

                Console.WriteLine($"Tests finished [{sw.ElapsedMilliseconds}]");
                sw.Stop();
                Console.ReadLine();
            }
        }
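The comments in the example above note that session.Run() can also be called concurrently. A minimal sketch of that variant, assuming the same graph, session, and CreateTensorFromImageFile helper as above plus the usual System.Linq and System.Threading.Tasks usings; the method name is made up:

        static void ClassifyConcurrently(TFGraph graph, TFSession session, List<string> files)
        {
            // TensorFlow sessions support concurrent Run calls, so each file gets its own runner.
            Parallel.ForEach(files, file =>
            {
                var tensor = CreateTensorFromImageFile(file);
                var output = session.GetRunner()
                                    .AddInput(graph["input"][0], tensor)
                                    .Fetch(graph["output"][0])
                                    .Run();

                var probabilities = ((float[][])output[0].GetValue(jagged: true))[0];
                int bestIdx = Array.IndexOf(probabilities, probabilities.Max());
                Console.WriteLine($"{Path.GetFileName(file)} best index: {bestIdx}");
            });
        }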
Example #2
        public void Load()
        {
            var num = 1 * 1 + 2 * 2 + 3 * 3 + 4 * 4 + 5 * 5;

            Debug.WriteLine(num);
            Int64 startTime = Stopwatch.GetTimestamp(); // Gets the current number of timer ticks (roughly, clock cycles)

            dir = "tmp";
            List <string> files = Directory.GetFiles("img").ToList();

            ModelFiles(dir);
            Console.WriteLine($"Load Image {files.Count}number  taking:{ (Stopwatch.GetTimestamp() - startTime) / (double)Stopwatch.Frequency}"); // 获取以每秒计时周期数表示的计时器频率。此字段为只读
            Console.WriteLine($"start Init TFGraph :{ (Stopwatch.GetTimestamp() - startTime) / (double)Stopwatch.Frequency}");                    // 获取以每秒
            var graph = new TFGraph();
            // Load the serialized GraphDef from a file
            var model = File.ReadAllBytes(modelFile);

            // Import the GraphDef
            graph.Import(model, "");
            Console.WriteLine($"end Init TFGraph :{ (Stopwatch.GetTimestamp() - startTime) / (double)Stopwatch.Frequency}");// 获取以每秒

            using (var session = new TFSession(graph))
            {
                var labels = File.ReadAllLines(labelsFile);
                foreach (var file in files)
                {
                    Console.WriteLine($"start TensorFlow图像识别 LineZero :{ (Stopwatch.GetTimestamp() - startTime) / (double)Stopwatch.Frequency}");// 获取以每秒
                    // Run inference on the image files
                    // For multiple images, session.Run() can be called in a loop (and
                    // concurrently). Alternatively, images can be batched since the model
                    // accepts batches of image data as input.
                    var tensor = CreateTensorFromImageFile(file);

                    var runner = session.GetRunner();
                    runner.AddInput(graph["input"][0], tensor).Fetch(graph["output"][0]);
                    var output = runner.Run();
                    // output[0].Value() is a vector containing probabilities of
                    // labels for each image in the "batch". The batch size was 1.
                    // Find the most probable label index.

                    var result = output[0];
                    var rshape = result.Shape;
                    if (result.NumDims != 2 || rshape[0] != 1)
                    {
                        var shape = "";
                        foreach (var d in rshape)
                        {
                            shape += $"{d} ";
                        }
                        shape = shape.Trim();
                        Console.WriteLine($"Error: expected to produce a [1 N] shaped tensor where N is the number of labels, instead it produced one with shape [{shape}]");
                        Environment.Exit(1);
                    }

                    // You can get the data in two ways: as a multi-dimensional array or as arrays of arrays.
                    // One can be nicer to read than the other; pick based on how you want to process it.
                    bool jagged = true;

                    var   bestIdx = 0;
                    float p = 0, best = 0;

                    if (jagged)
                    {
                        var probabilities = ((float[][])result.GetValue(jagged: true))[0];
                        for (int i = 0; i < probabilities.Length; i++)
                        {
                            if (probabilities[i] > best)
                            {
                                bestIdx = i;
                                best    = probabilities[i];
                            }
                        }
                    }
                    else
                    {
                        var val = (float[, ])result.GetValue(jagged: false);

                        // Result is [1,N], flatten array
                        for (int i = 0; i < val.GetLength(1); i++)
                        {
                            if (val[0, i] > best)
                            {
                                bestIdx = i;
                                best    = val[0, i];
                            }
                        }
                    }

                    Console.WriteLine($"{Path.GetFileName(file)} 最佳匹配: [{bestIdx}] {best * 100.0}% 标识为:{labels[bestIdx]}");
                    Console.WriteLine("end TensorFlow图像识别 LineZero" + (Stopwatch.GetTimestamp() - startTime) / (double)Stopwatch.Frequency);// 获取以每秒计时周期数表示的计时器频率。此字段为只读
                }
            }
        }
Example #3
        public void MNISTTwoHiddenLayerNetworkTest()
        {
            // Parameters
            var learningRate = 0.1f;
            var epochs       = 5;


            var mnist = new Mnist();

            mnist.ReadDataSets("/tmp");
            int batchSize  = 100;
            int numBatches = mnist.TrainImages.Length / batchSize;

            using (var graph = new TFGraph())
            {
                var X = graph.Placeholder(TFDataType.Float, new TFShape(-1, 784));
                var Y = graph.Placeholder(TFDataType.Float, new TFShape(-1, 10));

                graph.Seed = 1;
                var initB  = (float)(4 * Math.Sqrt(6) / Math.Sqrt(784 + 500));
                var W1     = graph.Variable(graph.RandomUniform(new TFShape(784, 500), minval: -initB, maxval: initB), operName: "W1");
                var b1     = graph.Variable(graph.Constant(0f, new TFShape(500), TFDataType.Float), operName: "b1");
                var layer1 = graph.Sigmoid(graph.Add(graph.MatMul(X, W1.Read), b1.Read, operName: "layer1"));

                initB = (float)(4 * Math.Sqrt(6) / Math.Sqrt(500 + 100));
                var W2     = graph.Variable(graph.RandomUniform(new TFShape(500, 100), minval: -initB, maxval: initB), operName: "W2");
                var b2     = graph.Variable(graph.Constant(0f, new TFShape(100), TFDataType.Float), operName: "b2");
                var layer2 = graph.Sigmoid(graph.Add(graph.MatMul(layer1, W2.Read), b2.Read, operName: "layer2"));

                initB = (float)(4 * Math.Sqrt(6) / Math.Sqrt(100 + 10));
                var W3     = graph.Variable(graph.RandomUniform(new TFShape(100, 10), minval: -initB, maxval: initB), operName: "W3");
                var b3     = graph.Variable(graph.Constant(0f, new TFShape(10), TFDataType.Float), operName: "b3");
                var layer3 = graph.Add(graph.MatMul(layer2, W3.Read), b3.Read, operName: "layer3");

                // No support for computing gradient for the SparseSoftmaxCrossEntropyWithLogits function
                // instead using SoftmaxCrossEntropyWithLogits
                var cost = graph.ReduceMean(graph.SoftmaxCrossEntropyWithLogits(layer3, Y, "cost").loss);

                var prediction = graph.ArgMax(graph.Softmax(layer3), graph.Const(1));
                var labels     = graph.ArgMax(Y, graph.Const(1));
                var areCorrect = graph.Equal(prediction, labels);
                var accuracy   = graph.ReduceMean(graph.Cast(areCorrect, TFDataType.Float));

                var sgd       = new SGD(graph, learningRate, 0.9f);
                var updateOps = sgd.Minimize(cost);

                using (var session = new TFSession(graph))
                {
                    session.GetRunner().AddTarget(graph.GetGlobalVariablesInitializer()).Run();

                    var expectedLines = File.ReadAllLines(Path.Combine(_testDataPath, "SGDMnist", "expected.txt"));

                    for (int i = 0; i < epochs; i++)
                    {
                        var   reader      = mnist.GetTrainReader();
                        float avgLoss     = 0;
                        float avgAccuracy = 0;
                        for (int j = 0; j < numBatches; j++)
                        {
                            var batch   = reader.NextBatch(batchSize);
                            var tensors = session.GetRunner()
                                          .AddInput(X, batch.Item1)
                                          .AddInput(Y, batch.Item2)
                                          .AddTarget(updateOps).Fetch(cost, accuracy, prediction, labels).Run();

                            avgLoss     += (float)tensors[0].GetValue();
                            avgAccuracy += (float)tensors[1].GetValue();
                        }
                        var output = $"Epoch: {i}, loss(Cross-Entropy): {avgLoss / numBatches:F4}, Accuracy:{avgAccuracy / numBatches:F4}";
                        Assert.Equal(expectedLines[i], output);
                    }
                }
            }
        }
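The initB expressions in the example above are the sigmoid variant of Glorot/Xavier initialization, limit = 4 * sqrt(6 / (fanIn + fanOut)). A small helper (a sketch; the name is made up) makes the repeated computation explicit:

        static float SigmoidXavierLimit(int fanIn, int fanOut)
        {
            // 4 * Math.Sqrt(6) / Math.Sqrt(fanIn + fanOut) == 4 * Math.Sqrt(6.0 / (fanIn + fanOut))
            return (float)(4 * Math.Sqrt(6.0 / (fanIn + fanOut)));
        }

        // Usage: var initB = SigmoidXavierLimit(784, 500);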
Example #4
        public override IObservable <Pose> Process(IObservable <IplImage> source)
        {
            return(Observable.Defer(() =>
            {
                TFSessionOptions options = new TFSessionOptions();
                unsafe
                {
                    byte[] GPUConfig = new byte[] { 0x32, 0x02, 0x20, 0x01 };
                    fixed(void *ptr = &GPUConfig[0])
                    {
                        options.SetConfig(new IntPtr(ptr), GPUConfig.Length);
                    }
                }

                var graph = new TFGraph();
                var session = new TFSession(graph, options, null);
                var bytes = File.ReadAllBytes(ModelFileName);
                graph.Import(bytes);

                IplImage temp = null;
                TFTensor tensor = null;
                TFSession.Runner runner = null;
                var config = ConfigHelper.PoseConfig(PoseConfigFileName);
                return source.Select(input =>
                {
                    var poseScale = 1.0;
                    const int TensorChannels = 3;
                    var frameSize = input.Size;
                    var scaleFactor = ScaleFactor;
                    if (scaleFactor.HasValue)
                    {
                        poseScale = scaleFactor.Value;
                        frameSize.Width = (int)(frameSize.Width * poseScale);
                        frameSize.Height = (int)(frameSize.Height * poseScale);
                        poseScale = 1.0 / poseScale;
                    }

                    if (tensor == null || tensor.GetTensorDimension(1) != frameSize.Height || tensor.GetTensorDimension(2) != frameSize.Width)
                    {
                        tensor = new TFTensor(
                            TFDataType.Float,
                            new long[] { 1, frameSize.Height, frameSize.Width, TensorChannels },
                            frameSize.Width * frameSize.Height * TensorChannels * sizeof(float));
                        runner = session.GetRunner();
                        runner.AddInput(graph["Placeholder"][0], tensor);
                        runner.Fetch(graph["concat_1"][0]);
                    }

                    var frame = input;
                    if (frameSize != input.Size)
                    {
                        if (temp == null || temp.Size != frameSize)
                        {
                            temp = new IplImage(frameSize, input.Depth, input.Channels);
                        }

                        CV.Resize(input, temp);
                        frame = temp;
                    }

                    using (var image = new IplImage(frameSize, IplDepth.F32, TensorChannels, tensor.Data))
                    {
                        CV.Convert(frame, image);
                    }

                    // Run the model
                    var output = runner.Run();

                    // Fetch the results from output:
                    var poseTensor = output[0];
                    var pose = new Mat((int)poseTensor.Shape[0], (int)poseTensor.Shape[1], Depth.F32, 1, poseTensor.Data);
                    var result = new Pose(input);
                    var threshold = MinConfidence;
                    for (int i = 0; i < pose.Rows; i++)
                    {
                        BodyPart bodyPart;
                        bodyPart.Name = config[i];
                        bodyPart.Confidence = (float)pose.GetReal(i, 2);
                        if (bodyPart.Confidence < threshold)
                        {
                            bodyPart.Position = new Point2f(float.NaN, float.NaN);
                        }
                        else
                        {
                            bodyPart.Position.X = (float)(pose.GetReal(i, 1) * poseScale);
                            bodyPart.Position.Y = (float)(pose.GetReal(i, 0) * poseScale);
                        }
                        result.Add(bodyPart);
                    }
                    return result;
                });
            }));
        }
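The four GPUConfig bytes passed to SetConfig above are a hand-serialized tensorflow.ConfigProto that enables gpu_options.allow_growth. Decomposed per the protobuf wire format (the field numbers below follow the public ConfigProto/GPUOptions definitions, so treat them as an assumption rather than something verified against this repository):

        static byte[] BuildAllowGrowthConfig()
        {
            return new byte[]
            {
                0x32,   // ConfigProto.gpu_options: field 6, wire type 2 (length-delimited)
                0x02,   // the embedded GPUOptions message is 2 bytes long
                0x20,   // GPUOptions.allow_growth: field 4, wire type 0 (varint)
                0x01    // value: true
            };
        }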
Example #5
        /// <summary>
        /// The utility processes the image and produces an output image highlighting the detected objects.
        /// You need to complete the following steps to get the example working:
        /// 1. Download and unzip one of trained models from
        /// https://github.com/tensorflow/models/blob/master/object_detection/g3doc/detection_model_zoo.md
        ///
        /// for instance 'faster_rcnn_inception_resnet_v2_atrous_coco'
        /// 2. Download mscoco_label_map.pbtxt from
        /// https://github.com/tensorflow/models/blob/master/object_detection/data/mscoco_label_map.pbtxt
        ///
        /// 3. Run the ExampleObjectDetection util from command line specifying input_image, output_image, catalog and model options
        /// where input_image - the path to the image for processing
        /// output_image - the path where the image with detected objects will be saved
        /// catalog - the path to the 'mscoco_label_map.pbtxt' file (see 2)
        /// model - the path to the 'frozen_inference_graph.pb' file (see 1)
        ///
        /// for instance,
        /// ExampleObjectDetection --input_image="/demo/input.jpg" --output_image="/demo/output.jpg" --catalog="/demo/mscoco_label_map.pbtxt" --model="/demo/frozen_inference_graph.pb"
        /// </summary>
        /// <param name="args"></param>
        static void Main(string [] args)
        {
            options.Parse(args);

            if (_input == null)
            {
                throw new ArgumentException("Missing required option --input_image=");
            }

            if (_output == null)
            {
                throw new ArgumentException("Missing required option --output_image=");
            }

            if (_catalogPath == null)
            {
                throw new ArgumentException("Missing required option --catalog=");
            }

            if (_modelPath == null)
            {
                throw new ArgumentException("Missing required option --model=");
            }

            _catalog = CatalogUtil.ReadCatalogItems(_catalogPath);
            var fileTuples = new List <(string input, string output)> ()
            {
                (_input, _output)
            };
            string modelFile = _modelPath;

            using (var graph = new TFGraph()) {
                var model = File.ReadAllBytes(modelFile);
                graph.Import(new TFBuffer(model));

                using (var session = new TFSession(graph)) {
                    foreach (var tuple in fileTuples)
                    {
                        var tensor = ImageUtil.CreateTensorFromImageFile(tuple.input, TFDataType.UInt8);
                        var runner = session.GetRunner();


                        runner
                        .AddInput(graph ["image_tensor"] [0], tensor)
                        .Fetch(
                            graph ["detection_boxes"] [0],
                            graph ["detection_scores"] [0],
                            graph ["detection_classes"] [0],
                            graph ["num_detections"] [0]);
                        var output = runner.Run();

                        var boxes   = (float [, , ])output [0].GetValue(jagged: false);
                        var scores  = (float [, ])output [1].GetValue(jagged: false);
                        var classes = (float [, ])output [2].GetValue(jagged: false);
                        var num     = (float [])output [3].GetValue(jagged: false);

                        DrawBoxes(boxes, scores, classes, tuple.input, tuple.output, MIN_SCORE_FOR_OBJECT_HIGHLIGHTING);
                    }
                }
            }
        }
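DrawBoxes is called above but not shown in this snippet. A hypothetical sketch of such a helper, assuming System.Drawing is available and that the boxes come back as normalized [ymin, xmin, ymax, xmax] rows, which is the layout the TF object-detection models produce:

        static void DrawBoxes(float[,,] boxes, float[,] scores, float[,] classes,
                              string inputPath, string outputPath, double minScore)
        {
            using (var image = new System.Drawing.Bitmap(inputPath))
            using (var graphics = System.Drawing.Graphics.FromImage(image))
            using (var pen = new System.Drawing.Pen(System.Drawing.Color.Red, 2))
            {
                for (int i = 0; i < boxes.GetLength(1); i++)
                {
                    if (scores[0, i] < minScore)
                    {
                        continue;
                    }

                    // Scale the normalized coordinates to pixel coordinates.
                    float ymin = boxes[0, i, 0] * image.Height;
                    float xmin = boxes[0, i, 1] * image.Width;
                    float ymax = boxes[0, i, 2] * image.Height;
                    float xmax = boxes[0, i, 3] * image.Width;

                    // classes[0, i] could be mapped to label text via the catalog; omitted here.
                    graphics.DrawRectangle(pen, xmin, ymin, xmax - xmin, ymax - ymin);
                }
                image.Save(outputPath);
            }
        }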
Example #6
    /// Uses the stored information to run the tensorflow graph and generate
    /// the actions.
    public void DecideAction()
    {
#if ENABLE_TENSORFLOW
        if (currentBatchSize == 0)
        {
            return;
        }

        var runner = session.GetRunner();
        runner.Fetch(graph[graphScope + ActionPlaceholderName][0]);

        if (hasBatchSize)
        {
            runner.AddInput(graph[graphScope + BatchSizePlaceholderName][0], new int[] { currentBatchSize });
        }

        foreach (TensorFlowAgentPlaceholder placeholder in graphPlaceholders)
        {
            try
            {
                if (placeholder.valueType == TensorFlowAgentPlaceholder.tensorType.FloatingPoint)
                {
                    runner.AddInput(graph[graphScope + placeholder.name][0], new float[] { Random.Range(placeholder.minValue, placeholder.maxValue) });
                }
                else if (placeholder.valueType == TensorFlowAgentPlaceholder.tensorType.Integer)
                {
                    runner.AddInput(graph[graphScope + placeholder.name][0], new int[] { Random.Range((int)placeholder.minValue, (int)placeholder.maxValue + 1) });
                }
            }
            catch
            {
                throw new UnityAgentsException(string.Format(@"One of the TensorFlow placeholders could not be found.
                In brain {0}, there is no {1} placeholder named {2}.",
                                                             brain.gameObject.name, placeholder.valueType.ToString(), graphScope + placeholder.name));
            }
        }

        // Create the state tensor
        if (hasState)
        {
            if (brain.brainParameters.stateSpaceType == StateType.discrete)
            {
                int[,] discreteInputState = new int[currentBatchSize, 1];
                for (int i = 0; i < currentBatchSize; i++)
                {
                    discreteInputState[i, 0] = (int)inputState[i, 0];
                }
                runner.AddInput(graph[graphScope + StatePlacholderName][0], discreteInputState);
            }
            else
            {
                runner.AddInput(graph[graphScope + StatePlacholderName][0], inputState);
            }
        }

        // Create the observation tensors
        for (int obs_number = 0; obs_number < brain.brainParameters.cameraResolutions.Length; obs_number++)
        {
            runner.AddInput(graph[graphScope + ObservationPlaceholderName[obs_number]][0], observationMatrixList[obs_number]);
        }

        if (hasRecurrent)
        {
            runner.AddInput(graph[graphScope + RecurrentInPlaceholderName][0], inputOldMemories);
            runner.Fetch(graph[graphScope + RecurrentOutPlaceholderName][0]);
        }

        TFTensor[] networkOutput;
        try
        {
            networkOutput = runner.Run();
        }
        catch (TFException e)
        {
            string errorMessage = e.Message;
            try
            {
                errorMessage = string.Format(@"The tensorflow graph needs an input for {0} of type {1}",
                                             e.Message.Split(new string[] { "Node: " }, 0)[1].Split('=')[0],
                                             e.Message.Split(new string[] { "dtype=" }, 0)[1].Split(',')[0]);
            }
            finally
            {
                throw new UnityAgentsException(errorMessage);
            }
        }

        // Create the recurrent tensor
        if (hasRecurrent)
        {
            Dictionary <int, float[]> new_memories = new Dictionary <int, float[]>();

            float[,] recurrent_tensor = networkOutput[1].GetValue() as float[, ];

            int i = 0;
            foreach (int k in agentKeys)
            {
                float[] m = new float[brain.brainParameters.memorySize];
                for (int j = 0; j < brain.brainParameters.memorySize; j++)
                {
                    m[j] = recurrent_tensor[i, j];
                }
                new_memories.Add(k, m);
                i++;
            }

            brain.SendMemories(new_memories);
        }

        Dictionary <int, float[]> actions = new Dictionary <int, float[]>();

        if (brain.brainParameters.actionSpaceType == StateType.continuous)
        {
            float[,] output = networkOutput[0].GetValue() as float[, ];
            int i = 0;
            foreach (int k in agentKeys)
            {
                float[] a = new float[brain.brainParameters.actionSize];
                for (int j = 0; j < brain.brainParameters.actionSize; j++)
                {
                    a[j] = output[i, j];
                }
                actions.Add(k, a);
                i++;
            }
        }
        else if (brain.brainParameters.actionSpaceType == StateType.discrete)
        {
            long[,] output = networkOutput[0].GetValue() as long[, ];
            int i = 0;
            foreach (int k in agentKeys)
            {
                float[] a = new float[1] {
                    (float)(output[i, 0])
                };
                actions.Add(k, a);
                i++;
            }
        }

        brain.SendActions(actions);
#endif
    }
Example #7
    /// Loads the tensorflow graph model to generate a TFGraph object
    public void InitializeCoreBrain(Communicator communicator)
    {
#if ENABLE_TENSORFLOW
#if UNITY_ANDROID
        // This needs to be called only once and will raise an exception if
        // there are multiple internal brains
        try{
            TensorFlowSharp.Android.NativeBinding.Init();
        }
        catch {
        }
#endif
        if ((communicator == null) ||
            (!broadcast))
        {
            coord = null;
        }
        else if (communicator is ExternalCommunicator)
        {
            coord = (ExternalCommunicator)communicator;
            coord.SubscribeBrain(brain);
        }

        //if (graphModel != null)
        if (graphModel != null || LoadExternal.instance.maxIteration > 0)
        {
            graph = new TFGraph();
            if (graphModel != null)
            {
                graph.Import(graphModel.bytes);
            }
            else
            {
                graph.Import(LoadExternal.instance.iterationData[LoadExternal.instance.maxIteration - 1]);
            }

            session = new TFSession(graph);

            // TODO: Make this a loop over a dynamic set of graph inputs

            if ((graphScope.Length > 1) && (graphScope[graphScope.Length - 1] != '/'))
            {
                graphScope = graphScope + '/';
            }

            if (graph[graphScope + BatchSizePlaceholderName] != null)
            {
                hasBatchSize = true;
            }
            if ((graph[graphScope + RecurrentInPlaceholderName] != null) && (graph[graphScope + RecurrentOutPlaceholderName] != null))
            {
                hasRecurrent = true;
                var runner = session.GetRunner();
                runner.Fetch(graph[graphScope + "memory_size"][0]);
                var networkOutput = runner.Run()[0].GetValue();
                memorySize = (int)networkOutput;
            }
            if (graph[graphScope + VectorObservationPlacholderName] != null)
            {
                hasState = true;
            }
            if (graph[graphScope + PreviousActionPlaceholderName] != null)
            {
                hasPrevAction = true;
            }
        }
        observationMatrixList = new List <float[, , , ]>();
        texturesHolder        = new List <Texture2D>();
#endif
    }
Example #8
        public void Export(string newModelDir)
        {
            if (newModelDir.Last() != '/' && newModelDir.Last() != '\\')
            {
                newModelDir += "/";
            }

            TFOutput NodeSaver, NodeSaverPath;

            //if (!ForTraining)
            {
                NodeSaver     = Graph["save_1/control_dependency"][0];
                NodeSaverPath = Graph["save_1/Const"][0];
            }
            //else
            //{
            //    NodeSaver = Graph["save_2/control_dependency"][0];
            //    NodeSaverPath = Graph["save_2/Const"][0];
            //}

            Directory.CreateDirectory(newModelDir);
            if (Directory.Exists(newModelDir + "variables"))
            {
                Directory.Delete(newModelDir + "variables", true);
            }
            Directory.CreateDirectory(newModelDir + "variables");

            foreach (var fileName in Directory.EnumerateFiles(ModelDir))
            {
                string Source      = fileName;
                string Destination = newModelDir + Helper.PathToNameWithExtension(fileName);

                bool AreSame = false;
                try
                {
                    AreSame = Helper.NormalizePath(Source) == Helper.NormalizePath(Destination);
                }
                catch { }

                if (!AreSame)
                {
                    File.Copy(fileName, newModelDir + Helper.PathToNameWithExtension(fileName), true);
                }
            }

            TFTensor TensorPath = TFTensor.CreateString(Encoding.ASCII.GetBytes(newModelDir + "variables/variables"));
            var      Runner     = Session.GetRunner().AddInput(NodeSaverPath, TensorPath);

            Runner.Run(NodeSaver);

            if (Directory.EnumerateDirectories(newModelDir + "variables", "variables_temp*").Count() > 0)
            {
                string TempName = Directory.EnumerateDirectories(newModelDir + "variables", "variables_temp*").First();
                foreach (var oldPath in Directory.EnumerateFiles(TempName))
                {
                    string OldName = Helper.PathToNameWithExtension(oldPath);
                    string NewName = "variables" + OldName.Substring(OldName.IndexOf("."));
                    string NewPath = newModelDir + "variables/" + NewName;

                    File.Move(oldPath, NewPath);
                }
                Directory.Delete(TempName, true);
            }

            TensorPath.Dispose();
        }
Example #9
        public NoiseNet3D(string modelDir, int3 boxDimensions, int nThreads = 1, int batchSize = 8, bool forTraining = true, int deviceID = 0)
        {
            lock (TFHelper.DeviceSync[deviceID])
            {
                DeviceID      = deviceID;
                BoxDimensions = boxDimensions;
                ForTraining   = forTraining;
                ModelDir      = modelDir;
                MaxThreads    = nThreads;
                BatchSize     = batchSize;

                TFSessionOptions SessionOptions = TFHelper.CreateOptions();
                TFSession        Dummy          = new TFSession(new TFGraph(), SessionOptions);

                Session = TFHelper.FromSavedModel(SessionOptions, null, ModelDir, new[] { forTraining ? "train" : "serve" }, new TFGraph(), $"/device:GPU:{deviceID}");
                Graph   = Session.Graph;

                NodeInputSource = Graph["volume_source"][0];
                if (forTraining)
                {
                    NodeInputTarget  = Graph["volume_target"][0];
                    NodeLearningRate = Graph["training_learning_rate"][0];
                    NodeOpTrain      = Graph["train_momentum"][0];
                    NodeOutputLoss   = Graph["l2_loss"][0];
                }

                NodeOutputPredicted = Graph["volume_predict"][0];

                TensorSource = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, BoxDimensions.X, BoxDimensions.Y, boxDimensions.Z, 1),
                                                                               new float[BatchSize * BoxDimensions.Elements()],
                                                                               0,
                                                                               BatchSize * (int)BoxDimensions.Elements()),
                                                      nThreads);

                if (ForTraining)
                {
                    TensorTarget = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, BoxDimensions.X, BoxDimensions.Y, boxDimensions.Z, 1),
                                                                                   new float[BatchSize * BoxDimensions.Elements()],
                                                                                   0,
                                                                                   BatchSize * (int)BoxDimensions.Elements()),
                                                          nThreads);

                    TensorLearningRate = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(1),
                                                                                         new float[1],
                                                                                         0,
                                                                                         1),
                                                                nThreads);
                }

                ResultPredicted = Helper.ArrayOfFunction(i => new float[BatchSize * BoxDimensions.Elements()], nThreads);
                ResultLoss      = Helper.ArrayOfFunction(i => new float[1], nThreads);

                //if (!ForTraining)
                RunnerPrediction = Helper.ArrayOfFunction(i => Session.GetRunner().
                                                          AddInput(NodeInputSource, TensorSource[i]).
                                                          Fetch(NodeOutputPredicted),
                                                          nThreads);
                if (ForTraining)
                {
                    RunnerTraining = Helper.ArrayOfFunction(i => Session.GetRunner().
                                                            AddInput(NodeInputSource, TensorSource[i]).
                                                            AddInput(NodeInputTarget, TensorTarget[i]).
                                                            AddInput(NodeLearningRate, TensorLearningRate[i]).
                                                            Fetch(NodeOutputPredicted, NodeOutputLoss, NodeOpTrain),
                                                            nThreads);
                }
            }

            // Run prediction or training for one batch to claim all the memory needed
            float[] InitDecoded;
            float[] InitLoss;
            //if (!ForTraining)
            {
                Predict(new float[BoxDimensions.Elements() * BatchSize],
                        0,
                        out InitDecoded);
            }
            if (ForTraining)
            {
                RandomNormal RandN = new RandomNormal();
                Train(Helper.ArrayOfFunction(i => RandN.NextSingle(0, 1), BatchSize * (int)BoxDimensions.Elements()),
                      Helper.ArrayOfFunction(i => RandN.NextSingle(0, 1), BatchSize * (int)BoxDimensions.Elements()),
                      1e-10f,
                      0,
                      out InitDecoded,
                      out InitLoss);
            }
        }
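A minimal usage sketch for the class above, assuming a SavedModel directory exported with the "serve" tag and the Predict(float[] source, int threadID, out float[] prediction) signature visible in the constructor; the path and box size below are placeholders:

        public static void DenoiseOneBatch()
        {
            var boxDims = new int3(64, 64, 64);            // hypothetical box size
            var net     = new NoiseNet3D("denoise_model/", boxDims, nThreads: 1, batchSize: 1, forTraining: false);

            // One batch worth of input voxels; replace with real volume data.
            var source = new float[(int)boxDims.Elements() * 1];

            float[] denoised;
            net.Predict(source, 0, out denoised);          // thread slot 0
            Console.WriteLine($"Predicted {denoised.Length} voxels");
        }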
Example #10
        static void Main(string[] args)
        {
            //var capture = new VideoCapture("rtmp://rtmp.open.ys7.com/openlive/61e96da9f12a4d058f4737d02c42998d");
            var capture = new VideoCapture("D:\\视频1\\192.168.1.65_01_20190314114136657_1.mp4");

            modelFile = "logs_2\\pb\\frozen_model.pb";
            //dir = "tmp";
            //List<string> files = Directory.GetFiles("img").ToList();
            //ModelFiles(dir);
            var graph = new TFGraph();
            // Load the serialized GraphDef from a file
            var model = File.ReadAllBytes(modelFile);

            // Import the GraphDef
            graph.Import(model, "");
            using (var windowSrc = new Window("src"))
                using (var frame = new Mat())
                    using (var image缩小 = new Mat())
                        using (var session = new TFSession(graph))
                        {
                            string file = "1.jpg";
                            //var labels = File.ReadAllLines(labelsFile);
                            Console.WriteLine("TensorFlow图像识别 LineZero");

                            //var frame = new Mat();
                            //var inrange = new Mat();
                            //var fg = new Mat();

                            while (true)
                            {
                                capture.Read(frame);
                                if (frame.Empty())
                                {
                                    break;
                                }
                                Cv2.Resize(frame, image缩小, new Size(280, 280), 0, 0, InterpolationFlags.Linear); // resize down to 280x280

                                Cv2.ImWrite(file, image缩小);

                                var tensor = CreateTensorFromImageFile(file);



                                // Run inference on the image files
                                // For multiple images, session.Run() can be called in a loop (and
                                // concurrently). Alternatively, images can be batched since the model
                                // accepts batches of image data as input.

                                var runner = session.GetRunner();
                                runner.AddInput(graph["x_input"][0], tensor).Fetch(graph["softmax_linear/softmax_linear"][0]);
                                var output = runner.Run();
                                // output[0].Value() is a vector containing probabilities of
                                // labels for each image in the "batch". The batch size was 1.
                                // Find the most probable label index.

                                var result = output[0];
                                var rshape = result.Shape;
                                if (result.NumDims != 2 || rshape[0] != 1)
                                {
                                    var shape = "";
                                    foreach (var d in rshape)
                                    {
                                        shape += $"{d} ";
                                    }
                                    shape = shape.Trim();
                                    Console.WriteLine($"Error: expected to produce a [1 N] shaped tensor where N is the number of labels, instead it produced one with shape [{shape}]");
                                    Environment.Exit(1);
                                }

                                // You can get the data in two ways: as a multi-dimensional array or as arrays of arrays.
                                // One can be nicer to read than the other; pick based on how you want to process it.
                                bool jagged = true;

                                var   bestIdx = 0;
                                float p = 0, best = 0;
                                if (jagged)
                                {
                                    var      probabilities = ((float[][])result.GetValue(jagged: true))[0];
                                    double[] d             = floatTodouble(probabilities);
                                    double[] retResult     = Softmax(d);

                                    for (int i = 0; i < retResult.Length; i++)
                                    {
                                        // Compare against the softmax output so that "best" is an actual probability
                                        if ((float)retResult[i] > best)
                                        {
                                            bestIdx = i;
                                            best    = (float)retResult[i];
                                        }
                                    }
                                }
                                else
                                {
                                    var val = (float[, ])result.GetValue(jagged: false);

                                    // Result is [1,N], flatten array
                                    for (int i = 0; i < val.GetLength(1); i++)
                                    {
                                        if (val[0, i] > best)
                                        {
                                            bestIdx = i;
                                            best    = val[0, i];
                                        }
                                    }
                                }

                                //Console.WriteLine($"{Path.GetFileName(file)} 最佳匹配: [{bestIdx}] {best * 100.0}% 标识为:{labels[bestIdx]}");
                                string 标识1 = "";
                                switch (bestIdx)
                                {
                                case 0:
                                    标识1 = "kong0";
                                    break;

                                case 1:
                                    标识1 = "yao1";
                                    break;

                                case 2:
                                    标识1 = "yao2";
                                    break;

                                case 3:
                                    标识1 = "yao3";
                                    break;

                                case 4:
                                    标识1 = "yao4";
                                    break;

                                case 5:
                                    标识1 = "xian1";
                                    break;

                                case 6:
                                    标识1 = "xian2";
                                    break;

                                case 7:
                                    标识1 = "have7";
                                    break;
                                }
                                string 标识2 = "--: " + (best * 100).ToString("0.00") + "%";

                                Point textPos = new Point(1, 100);

                                image缩小.PutText(标识1 + 标识2, textPos, HersheyFonts.HersheySimplex, 0.5, Scalar.White);


                                windowSrc.ShowImage(image缩小);
                                Cv2.WaitKey(10);
                            }
                        }
            Console.ReadKey();
        }
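floatTodouble and Softmax are called in the example above but not defined in the snippet. Plausible sketches (assuming System.Linq), with the usual max-subtraction for numerical stability in the softmax:

        static double[] floatTodouble(float[] values)
        {
            var result = new double[values.Length];
            for (int i = 0; i < values.Length; i++)
            {
                result[i] = values[i];
            }
            return result;
        }

        static double[] Softmax(double[] logits)
        {
            // Subtract the maximum before exponentiating so Math.Exp never overflows.
            double   max  = logits.Max();
            double[] exps = logits.Select(v => Math.Exp(v - max)).ToArray();
            double   sum  = exps.Sum();
            return exps.Select(v => v / sum).ToArray();
        }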
Example #11
        public CubeNet(string modelDir, int deviceID = 0, int nThreads = 1, int batchSize = 1, int nClasses = 2, bool forTraining = false)
        {
            lock (TFHelper.DeviceSync[deviceID])
            {
                DeviceID    = deviceID;
                ForTraining = forTraining;
                ModelDir    = modelDir;
                MaxThreads  = nThreads;
                BatchSize   = batchSize;
                NClasses    = nClasses;

                TFSessionOptions SessionOptions = TFHelper.CreateOptions();
                TFSession        Dummy          = new TFSession(new TFGraph(), SessionOptions);

                Session = TFHelper.FromSavedModel(SessionOptions, null, ModelDir, new[] { forTraining ? "train" : "serve" }, new TFGraph(), $"/device:GPU:{deviceID}");
                Graph   = Session.Graph;

                if (forTraining)
                {
                    NodeInputMicTile = Graph["images"][0];
                    NodeInputLabels  = Graph["image_classes"][0];
                    NodeInputWeights = Graph["image_weights"][0];
                    NodeLearningRate = Graph["training_learning_rate"][0];
                    NodeOpTrain      = Graph["train_momentum"][0];

                    NodeOutputLoss = Graph["cross_entropy"][0];
                }
                else
                {
                    NodeInputMicTilePredict = Graph["images_predict"][0];
                }

                NodeOutputArgMax  = Graph["argmax_tensor"][0];
                NodeOutputSoftMax = Graph["softmax_tensor"][0];

                if (forTraining)
                {
                    TensorMicTile = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, BoxDimensionsTrain.X, BoxDimensionsTrain.Y, BoxDimensionsTrain.Z, 1),
                                                                                    new float[BatchSize * BoxDimensionsTrain.Elements()],
                                                                                    0,
                                                                                    BatchSize * (int)BoxDimensionsTrain.Elements()),
                                                           nThreads);

                    TensorTrainingLabels = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, BoxDimensionsTrain.X, BoxDimensionsTrain.Y, BoxDimensionsTrain.Z, NClasses),
                                                                                           new float[BatchSize * BoxDimensionsTrain.Elements() * NClasses],
                                                                                           0,
                                                                                           BatchSize * (int)BoxDimensionsTrain.Elements() * NClasses),
                                                                  nThreads);

                    TensorTrainingWeights = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, BoxDimensionsTrain.X, BoxDimensionsTrain.Y, BoxDimensionsTrain.Z, 1),
                                                                                            new float[BatchSize * BoxDimensionsTrain.Elements()],
                                                                                            0,
                                                                                            BatchSize * (int)BoxDimensionsTrain.Elements()),
                                                                   nThreads);

                    TensorLearningRate = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(1),
                                                                                         new float[1],
                                                                                         0,
                                                                                         1),
                                                                nThreads);
                }
                else
                {
                    TensorMicTilePredict = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, BoxDimensionsPredict.X, BoxDimensionsPredict.Y, BoxDimensionsPredict.Z, 1),
                                                                                           new float[BatchSize * BoxDimensionsPredict.Elements()],
                                                                                           0,
                                                                                           BatchSize * (int)BoxDimensionsPredict.Elements()),
                                                                  nThreads);
                }

                if (forTraining)
                {
                    ResultArgMax  = Helper.ArrayOfFunction(i => new long[BatchSize * (int)BoxDimensionsTrain.Elements()], nThreads);
                    ResultSoftMax = Helper.ArrayOfFunction(i => new float[BatchSize * (int)BoxDimensionsTrain.Elements() * NClasses], nThreads);
                    ResultLoss    = Helper.ArrayOfFunction(i => new float[BatchSize], nThreads);
                }
                else
                {
                    ResultArgMax  = Helper.ArrayOfFunction(i => new long[BatchSize * (int)BoxDimensionsPredict.Elements()], nThreads);
                    ResultSoftMax = Helper.ArrayOfFunction(i => new float[BatchSize * (int)BoxDimensionsPredict.Elements() * NClasses], nThreads);
                }

                if (!ForTraining)
                {
                    RunnerPrediction = Helper.ArrayOfFunction(i => Session.GetRunner().
                                                              AddInput(NodeInputMicTilePredict, TensorMicTilePredict[i]).
                                                              Fetch(NodeOutputArgMax, NodeOutputSoftMax),
                                                              nThreads);
                }
                if (ForTraining)
                {
                    RunnerTraining = Helper.ArrayOfFunction(i => Session.GetRunner().
                                                            AddInput(NodeInputMicTile, TensorMicTile[i]).
                                                            AddInput(NodeInputLabels, TensorTrainingLabels[i]).
                                                            AddInput(NodeInputWeights, TensorTrainingWeights[i]).
                                                            AddInput(NodeLearningRate, TensorLearningRate[i]).
                                                            Fetch(NodeOpTrain, NodeOutputArgMax, NodeOutputSoftMax, NodeOutputLoss),
                                                            nThreads);
                }
            }

            // Run prediction or training for one batch to claim all the memory needed
            long[]  InitArgMax;
            float[] InitProb;
            if (!ForTraining)
            {
                Predict(new float[BoxDimensionsPredict.Elements() * BatchSize],
                        0,
                        out InitArgMax,
                        out InitProb);
            }
            if (ForTraining)
            {
                RandomNormal RandN = new RandomNormal();
                Train(Helper.ArrayOfFunction(i => RandN.NextSingle(0, 1), BatchSize * (int)BoxDimensionsTrain.Elements()),
                      Helper.ArrayOfConstant(0.0f, BatchSize * (int)BoxDimensionsTrain.Elements() * NClasses),
                      Helper.ArrayOfConstant(0.0f, BatchSize * (int)BoxDimensionsTrain.Elements()),
                      1e-6f,
                      0,
                      out InitArgMax,
                      out InitProb);
            }
        }
Example #12
        public void Configure(IApplicationBuilder app, IHostingEnvironment env, IApplicationLifetime lifetime)
        {
            var configSettings = new ConfigSettings(env);

            var modelPath        = Path.Combine(configSettings.ContentRootPath, configSettings.ModelFileName);
            var classCatalogPath = Path.Combine(configSettings.ContentRootPath, configSettings.ClassCatalogFileName);

            if (!File.Exists(modelPath))
            {
                new ModelLoader().Load(configSettings.ModelUrl, configSettings.ContentRootPath).Wait();
            }

            if (!File.Exists(classCatalogPath))
            {
                new ClassCatalogLoader().Load(configSettings.ClassCatalogUrl, configSettings.ContentRootPath).Wait();
            }

            var model             = File.ReadAllBytes(modelPath);
            var classCatalogItems = new ClassCatalogParser().Parse(classCatalogPath).ToArray();
            var personClass       = classCatalogItems.Single(c => c.DisplayName == "person");

            var graph = new TFGraph();

            lifetime.ApplicationStopped.Register(graph.Dispose);
            graph.Import(new TFBuffer(model));

            app.Run(c =>
            {
                if (c.Request.Method != "POST")
                {
                    c.Response.StatusCode = 403;
                    return(Task.CompletedTask);
                }

                using (var session = new TFSession(graph))
                {
                    var tensor = TensorUtil.CreateFromImageFile(c.Request.Body, TFDataType.UInt8);
                    var output = session
                                 .GetRunner()
                                 .AddInput(graph["image_tensor"][0], tensor)
                                 .Fetch(
                        graph["detection_boxes"][0],
                        graph["detection_scores"][0],
                        graph["detection_classes"][0],
                        graph["num_detections"][0])
                                 .Run();
                    var boxes   = (float[, , ])output[0].GetValue();
                    var scores  = (float[, ])output[1].GetValue();
                    var classes = (float[, ])output[2].GetValue();
                    var num     = (float[])output[3].GetValue();

                    var personCount = Enumerable
                                      .Range(0, scores.GetLength(1))
                                      .Count(i => scores[0, i] >= 0.3 && Convert.ToInt32(classes[0, i]) == personClass.Id);

                    c.Response.ContentType = "application/json";

                    using (var streamWriter = new StreamWriter(c.Response.Body))
                    {
                        streamWriter.WriteLine($"{{\"result\": {personCount}}}");
                    }
                }

                return(Task.CompletedTask);
            });
        }
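A hypothetical client call against the endpoint configured above, assuming the service listens on http://localhost:5000, that the request body is the raw image bytes, and the usual System.Net.Http / System.Threading.Tasks usings:

        static async Task CountPeopleAsync(string imagePath)
        {
            using (var client = new HttpClient())
            using (var image = File.OpenRead(imagePath))
            {
                var response = await client.PostAsync("http://localhost:5000/", new StreamContent(image));
                var json     = await response.Content.ReadAsStringAsync();
                Console.WriteLine(json);   // e.g. {"result": 2}
            }
        }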
Example #13
        static void Main(string[] args)
        {
            var graph = new TFGraph();
            // The key step: read the trained .pb model file into bytes, then import it below
            var model = File.ReadAllBytes("DBN_model.pb");

            graph.Import(model);
            // Works on the incoming double array
            using (var sess = new TFSession(graph))
            {
                var runner = sess.GetRunner();
                // Get the prediction op
                var pre = graph["pre"][0];
                // Get the mean used for data normalization
                var GetMean  = graph["mean"][0];
                var mean     = runner.Run(GetMean);
                var datamean = (float[, ])mean.GetValue();
                // Get the standard deviation used for data normalization
                var GetStd  = graph["std"][0];
                var std     = runner.Run(GetStd);
                var datastd = (float[, ])std.GetValue();
                // Get the minimum values
                var GetminVals  = graph["minVals"][0];
                var minVals     = runner.Run(GetminVals);
                var dataminVals = (float[, ])minVals.GetValue();
                // Get the maximum values
                var GetmaxVals  = graph["maxVals"][0];
                var maxVals     = runner.Run(GetmaxVals);
                var datamaxVals = (float[, ])maxVals.GetValue();
                var shape       = new long[] { 1, 18 };
                var size        = 18 * sizeof(float);
                var input       = new TFTensor(TFDataType.Float, shape, size);
                runner.AddInput(graph["X"][0], input);
                Console.WriteLine("输入预测需要的18个数据,数值之间用空格隔开!");
                while (true)
                {
                    Console.WriteLine("等待输入……");
                    string str = Console.ReadLine();
                    //string str = "379,68,610.8,45,27486,410,45	797,415,0	,399,	24,40,17,	1	,1	,1	,110";
                    string[] strarray = str.Split(' ');
                    double[,] dou = new double[1, 18];
                    for (int i = 0; i < strarray.Length; i++)
                    {
                        dou[0, i] = Convert.ToDouble(strarray[i]);
                    }
                    // Convert the double array to float
                    float[,] floatitem = ConvertDoubleArrayTofloat(dou);
                    var inputs = new float[][, ] {
                        floatitem
                    };
                    // Normalize the data: (data - mean) / std
                    var input0 = inputs[0];
                    for (int i = 0; i < 18; i++)
                    {
                        input0[0, i] = (input0[0, i] - datamean[0, i]) / datastd[0, i];
                    }
                    // Linearly scale the data into the (0, 1) range


                    float[,] arraymin   = new float[1, 18];
                    float[,] arraymax   = new float[1, 18];
                    float[,] arrayrange = new float[1, 18];
                    float[,] finalinput = new float[1, 18];
                    arraymin            = GetMinArray(input0, dataminVals);
                    arraymax            = GetMaxArray(input0, datamaxVals);
                    arrayrange          = GetRange(arraymin, arraymax);
                    finalinput          = ConvertDataToOneZero(input0, arraymin, arrayrange);
                    input.SetValue(finalinput);
                    var pre23 = graph["pre"][0];
                    var r     = runner.Run(graph["pre"][0]);
                    var v1    = (float[, ])r.GetValue();
                    var v     = ConvertfloatArrayToOneOtZero(v1);
                    Console.WriteLine("Predicted fault:");
                    Console.Write(v[0, 0]);
                    Console.Write(v[0, 1]);
                    Console.Write(v[0, 2]);
                    Console.Write(v[0, 3]);
                    Console.Write(v[0, 4]);
                    Console.WriteLine();
                }
            }
        }
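The example above calls GetMinArray, GetMaxArray, GetRange and ConvertDataToOneZero, which are not shown. They appear to implement an element-wise min-max rescale of the 18 inputs into (0, 1); the sketches below are written under that assumption and are not the original implementations.

        // Hypothetical sketches of the normalization helpers referenced above.
        static float[,] GetMinArray(float[,] x, float[,] minVals)
        {
            var r = new float[1, 18];
            for (int i = 0; i < 18; i++) { r[0, i] = Math.Min(x[0, i], minVals[0, i]); }
            return r;
        }

        static float[,] GetMaxArray(float[,] x, float[,] maxVals)
        {
            var r = new float[1, 18];
            for (int i = 0; i < 18; i++) { r[0, i] = Math.Max(x[0, i], maxVals[0, i]); }
            return r;
        }

        static float[,] GetRange(float[,] min, float[,] max)
        {
            var r = new float[1, 18];
            for (int i = 0; i < 18; i++) { r[0, i] = max[0, i] - min[0, i]; }
            return r;
        }

        static float[,] ConvertDataToOneZero(float[,] x, float[,] min, float[,] range)
        {
            var r = new float[1, 18];
            for (int i = 0; i < 18; i++) { r[0, i] = (x[0, i] - min[0, i]) / range[0, i]; }
            return r;
        }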
    IEnumerator PoseUpdate(Texture2D texture)
    {
        if (texture.width != detectWidth || texture.height != detectHeight)
        {
            texture = scaled(texture, detectWidth, detectHeight);
        }
        var tensor = TransformInput(texture.GetPixels32());

        var runner = session.GetRunner();

        runner.AddInput(graph["image"][0], tensor);
        runner.Fetch(
            graph["heatmap"][0],
            graph["offset_2"][0],
            graph["displacement_fwd_2"][0],
            graph["displacement_bwd_2"][0]
            );

        var result           = runner.Run();
        var heatmap          = (float[, , , ])result[0].GetValue(jagged: false);
        var offsets          = (float[, , , ])result[1].GetValue(jagged: false);
        var displacementsFwd = (float[, , , ])result[2].GetValue(jagged: false);
        var displacementsBwd = (float[, , , ])result[3].GetValue(jagged: false);

        // Debug.Log(PoseNet.mean(heatmap));

        poses = posenet.DecodeMultiplePoses(
            heatmap, offsets,
            displacementsFwd,
            displacementsBwd,
            outputStride: 16, maxPoseDetections: 1,
            scoreThreshold: 0.5f, nmsRadius: 20);

        isPosing = false;

        /*/ Start of takasaka's code
         *      if(poses.Length > 0 && poses[0].score >= minPoseConfidence){
         *              var pose = poses[0];
         *
         *              src.Clear();
         *              src["nose"]      = pose.keypoints[0];
         *              src["eyeL"]      = pose.keypoints[1]; src["eyeR"]       = pose.keypoints[2];
         *              src["earL"]      = pose.keypoints[3]; src["earR"]       = pose.keypoints[4];
         *              src["upperArmL"] = pose.keypoints[5]; src["upperArmR"]  = pose.keypoints[6];
         *              src["lowerArmL"] = pose.keypoints[7]; src["lowerArmR"]  = pose.keypoints[8];
         *              src["handL"]     = pose.keypoints[9]; src["handR"]      = pose.keypoints[10];
         *              src["upperLegL"] = pose.keypoints[11]; src["upperLegR"] = pose.keypoints[12];
         *              src["lowerLegL"] = pose.keypoints[13]; src["lowerLegR"] = pose.keypoints[14];
         *              src["footL"]     = pose.keypoints[15]; src["footR"]     = pose.keypoints[16];
         *
         *              joint.Clear();
         *              foreach(KeyValuePair<string, PoseNet.Keypoint> pair in src){
         *                      if(pair.Value.score < minPartConfidence){ continue; }
         *
         *                      // Built with Vector3 so PoseNet can be extended or swapped for another pose-estimation library
         *                      joint[pair.Key]  = new Vector3(pair.Value.position.x, videoHeight - pair.Value.position.y, 0);
         *              }
         *
         *              // Average with the joint positions from previous frames
         *              if(jointsAvg.Count == 0){
         *                      for(int i = 0; i < jointsCount; i++){
         *                              jointsAvg[i] = new Dictionary<string, Vector3>(joint);
         *                      }
         *              }
         *              jointsAvg[jointsIndex++ % jointsCount] = new Dictionary<string, Vector3>(joint);
         *
         *              foreach(string key in src.Keys){
         *                      if(!joint.ContainsKey(key)){ continue; }
         *                      joint[key] = Vector3.zero;
         *
         *                      int hit = 0;
         *                      for(int i = 0; i < jointsCount; i++){
         *                              if(!jointsAvg[i].ContainsKey(key)){ continue; }
         *
         *                              joint[key] += jointsAvg[i][key];
         *                              ++hit;
         *                      }
         *                      if(hit > 0){ joint[key] /= hit; }
         *              }
         *
         *              // Left arm
         *              if(joint.ContainsKey("upperArmL")){
         *                      if(joint.ContainsKey("lowerArmL")){
         *                              if(joint.ContainsKey("upperArmR")){
         *                                      UpdateJoint(joint, "upperArmR", "upperArmL", "lowerArmL", dst["upperArmL"]);
         *                                      AdjJoint(0, -40, 0, dst["upperArmL"]); // dummy
         *                              }
         *
         *                              if(joint.ContainsKey("handL")){
         *                                      UpdateJoint(joint, "upperArmL", "lowerArmL", "handL", dst["lowerArmL"]);
         *
         *                                      // Correct so the palm always faces forward
         *                                      var armLow2Hand = (joint["handL"] - joint["lowerArmL"]);
         *                                      armLow2Hand.Normalize();
         *                                      var angleX = Rad2Deg(armLow2Hand.y) + 90;
         *                                      var angleY = Mathf.Min(0, Rad2Deg(armLow2Hand.x));
         *                                      AdjJoint(angleX, angleY, 0, dst["lowerArmL"]);
         *
         *                                      dst["handL"].transform.localRotation = new Quaternion();
         *                                      AdjJoint(0, 0, -20, dst["handL"]); // dummy
         *                              }
         *                      }
         *              }
         *
         *              // Right arm
         *              if(joint.ContainsKey("upperArmR")){
         *                      if(joint.ContainsKey("lowerArmR")){
         *                              if(joint.ContainsKey("upperArmL")){
         *                                      UpdateJoint(joint, "upperArmL", "upperArmR", "lowerArmR", dst["upperArmR"]);
         *                                      AdjJoint(0, 40, 0, dst["upperArmR"]); // dummy
         *                              }
         *                              if(joint.ContainsKey("handR")){
         *                                      UpdateJoint(joint, "upperArmR", "lowerArmR", "handR", dst["lowerArmR"]);
         *
         *                                      // Correct so the palm always faces forward
         *                                      var armLow2Hand = joint["handR"]- joint["lowerArmR"];
         *                                      armLow2Hand.Normalize();
         *                                      var angleX = Rad2Deg(armLow2Hand.y) + 90;
         *                                      var angleY = Mathf.Max(0, Rad2Deg(armLow2Hand.x));
         *                                      AdjJoint(angleX, angleY, 0, dst["lowerArmR"]);
         *
         *                                      dst["handR"].transform.localRotation = new Quaternion();
         *                                      AdjJoint(0, 0, 20, dst["handR"]); // dummy
         *                              }
         *                      }
         *              }
         *              // Chest
         *              if(joint.ContainsKey("upperArmL") && joint.ContainsKey("upperArmR")){
         *                      joint["upperArmLL"] = joint["upperArmL"] + vecX;
         *                      UpdateJoint(joint, "upperArmLL", "upperArmL", "upperArmR", dst["upperChest"], -20, 20);
         *              }
         *              // Hips
         *              if(joint.ContainsKey("upperLegL") && joint.ContainsKey("upperLegR")){
         *                      // No reference point, so create a temporary joint horizontally to the left of the left shoulder
         *                      joint["upperLegLL"] = joint["upperLegL"] + vecX;
         *                      UpdateJoint(joint, "upperLegLL", "upperLegL", "upperLegR", dst["spine"], -10, 10);
         *
         *                      float addX = -3.0f;
         *                      float mulX = 10.0f;
         *
         *                      var pos = joint["upperLegL"] + joint["upperLegR"];
         *                      pos /= 2;
         *                      var x = -(pos.x - (videoWidth / 2)) / videoWidth;
         *
         *                      Vector3 tmp = dst["hips"].transform.position;
         *                      dst["hips"].transform.position = new Vector3(x * mulX + addX, tmp.y, tmp.z);
         *
         *                      //AdjJoint(-20, 0, 0, dst["spine"]); // dummy
         *              }
         *              // Left leg
         *              if(joint.ContainsKey("upperLegL")){
         *                      if(joint.ContainsKey("lowerLegL")){
         *                              if(joint.ContainsKey("upperLegR")){
         *                                      // No reference point, so create a temporary joint above the base of the left leg
         *                                      joint["upperLegLUp"] = joint["upperLegR"] - joint["upperLegL"];
         *                                      joint["upperLegLUp"].Normalize();
         *                                      joint["upperLegLUp"] = Quaternion.AngleAxis(Rad2Deg(-halfPi), vecZ) * joint["upperLegLUp"];
         *                                      joint["upperLegLUp"] += joint["upperLegL"];
         *                                      UpdateJoint(joint, "upperLegLUp", "upperLegL", "lowerLegL", dst["upperLegL"], -20, 20);
         *                              }
         *                              if(joint.ContainsKey("footL")){
         *                                      UpdateJoint(joint, "upperLegL", "lowerLegL", "footL", dst["lowerLegL"], -20, 20);
         *
         *                                      // No reference point, so create a temporary joint vertically below the left ankle
         *                                      joint["footLDown"] = joint["footL"] - vecY;
         *                                      UpdateJoint(joint, "lowerLegL", "footL", "footLDown", dst["footL"]);
         *                              }else{
         *                                      // No reference point, so create a temporary joint vertically below the left knee
         *                                      joint["lowerLegLDown"] = joint["lowerLegL"] - vecY;
         *                                      UpdateJoint(joint, "upperLegL", "lowerLegL", "lowerLegLDown", dst["lowerLegL"]);
         *                                      UpdateJoint(joint, "lowerLegL", "lowerLegLDown", "lowerLegLDown", dst["footL"]);
         *                              }
         *                              AdjJoint(0, 10, 0, dst["footL"]); // dummy
         *                      }
         *              }
         *              // Right leg
         *              if(joint.ContainsKey("upperLegR")){
         *                      if(joint.ContainsKey("lowerLegR")){
         *                              if(joint.ContainsKey("upperLegL")){
         *                                      // No reference point, so create a temporary joint above the base of the right leg
         *                                      joint["upperLegRUp"] = joint["upperLegL"] - joint["upperLegR"];
         *                                      joint["upperLegRUp"].Normalize();
         *                                      joint["upperLegRUp"] = Quaternion.AngleAxis(Rad2Deg(halfPi), vecZ) * joint["upperLegRUp"];
         *                                      joint["upperLegRUp"] += joint["upperLegR"];
         *                                      UpdateJoint(joint, "upperLegRUp", "upperLegR", "lowerLegR", dst["upperLegR"], -20, 20);
         *                              }
         *                              if(joint.ContainsKey("footR")){
         *                                      UpdateJoint(joint, "upperLegR", "lowerLegR", "footR", dst["lowerLegR"], -20, 20);
         *
         *                                      // No reference point, so create a temporary joint vertically below the right ankle
         *                                      joint["footRDown"] = joint["footR"] - vecY;
         *                                      UpdateJoint(joint, "lowerLegR", "footR", "footRDown", dst["footR"]);
         *                              }else{
         *                                      // No reference point, so create a temporary joint vertically below the right knee
         *                                      joint["lowerLegRDown"] = joint["lowerLegR"] - vecY;
         *                                      UpdateJoint(joint, "upperLegR", "lowerLegR", "lowerLegRDown", dst["lowerLegR"]);
         *                                      UpdateJoint(joint, "lowerLegR", "lowerLegRDown", "lowerLegRDown", dst["footR"]);
         *                              }
         *                              AdjJoint(0, -10, 0, dst["footR"]); // dummy
         *                      }
         *              }
         * }
         * // End of takasaka's code */
        Debug.Log("pose update:" + Time.time + ", time:" + (Time.time - nowTime) + ", FPS:" + 1 / (Time.time - nowTime));
        nowTime = Time.time;
        yield return(null);
    }
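The coroutine above feeds the graph through a TransformInput helper that is not shown. One plausible shape for it, converting Unity's Color32[] pixels into a [1, height, width, 3] float tensor, follows; the explicit width/height parameters and the [-1, 1] normalization are assumptions and should be matched to how the PoseNet graph was exported.

    // Hypothetical TransformInput: flip the bottom-up pixel rows and pack RGB floats into a tensor.
    public static TFTensor TransformInput(Color32[] pixels, int width, int height)
    {
        float[] floats = new float[width * height * 3];
        int dst = 0;
        for (int y = height - 1; y >= 0; y--)                  // GetPixels32 returns rows bottom-up
        {
            for (int x = 0; x < width; x++)
            {
                Color32 c = pixels[y * width + x];
                floats[dst++] = c.r * (2.0f / 255.0f) - 1.0f;  // assumed [-1, 1] normalization
                floats[dst++] = c.g * (2.0f / 255.0f) - 1.0f;
                floats[dst++] = c.b * (2.0f / 255.0f) - 1.0f;
            }
        }
        return TFTensor.FromBuffer(new TFShape(1, height, width, 3), floats, 0, floats.Length);
    }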
Example #15
0
    // Update is called once per frame
    void Update()
    {
        if (Input.GetMouseButtonDown(0))
        {
            SaveImage();

            //Debug.Log("Show Origin image.");
            //OpenCVInterop.Show();

            if (!_ready)
            {
                return;
            }

            int detectedFaceCount = 0;
            // Declared here (rather than inside the fixed block) so it is still in scope
            // where the result text is built at the bottom of this method.
            string[] labels = { "Angry", "Disgust", "Fear", "Happy", "Sad", "Surprise", "Neutral" };
            unsafe
            {
                fixed(CvCircle *outFaces = _faces)
                {
                    Debug.Log("Detect Start.");
                    //outFaces
                    OpenCVInterop.Detect(outFaces, _maxFaceDetectCount, ref detectedFaceCount);

                    //
                    //Tensorflow code
                    //
                    string PATH      = "cropimg.png";                                    // Variable that stores the image path
                    var    testImage = Resources.Load(PATH, typeof(Texture2D)) as Texture2D; // Load the image

                    var file = "./Assets/cropimg.png";

                    // Code that loads the TensorFlow graph
                    TFSession.Runner runner;

                    TextAsset graphModel = Resources.Load("tf_model_191203_05") as TextAsset;
                    var       graph      = new TFGraph();

                    //graph.Import(new TFBuffer(graphModel.bytes));
                    graph.Import(graphModel.bytes);
                    TFSession session = new TFSession(graph);

                    Debug.Log("loaded freezed graph");

                    // Configure input and output
                    //int inputSize = 48;
                    //Texture2D img_input = testImage;
                    //TFTensor input_tensor = TransformInput(img_input.GetPixels32(), inputSize, inputSize);
                    //SetScreen(testImage.width, testImage.height, rawimage, testImage);

                    var tensor = CreateTensorFromImageData(file);

                    runner = session.GetRunner();
                    runner.AddInput(graph["input_1"][0], tensor);
                    runner.Fetch(graph["predictions/Softmax"][0]);

                    Debug.Log("fetch finish");

                    // Run the graph
                    float[,] results = runner.Run()[0].GetValue() as float[, ];

                    Debug.Log("run");

                    float output = 0.0f;

                    for (int i = 0; i < 7; i++)
                    {
                        output = results[0, i];
                        Debug.Log(labels[i] + ":" + output);
                        percent[i] = output * 100;

                        if (output >= result_rate)
                        {
                            result_rate  = output;
                            result_label = i;
                        }
                    }
                }
            }

            webcamTexture.Stop();
            byte[] byteArray = File.ReadAllBytes(@"C:\Users\dqf96\Desktop\NewUnityProject - 복사본\Assets\cropimg.png");
            //create a texture and load byte array to it
            // Texture size does not matter
            Texture2D sampleTexture = new Texture2D(2, 2);
            // the size of the texture will be replaced by image size
            bool isLoaded = sampleTexture.LoadImage(byteArray);
            // apply this texure as per requirement on image or material
            GameObject image = GameObject.Find("RawImage");
            if (isLoaded)
            {
                image.GetComponent <RawImage>().texture = sampleTexture;
            }

            // Display the results on screen
            slider1         = GameObject.Find("Canvas1").transform.Find("Slider1").GetComponent <Slider>();
            slider1.value   = percent[0];
            textAsset1      = GameObject.Find("Canvas1").transform.Find("result1").GetComponent <Text>();
            textAsset1.text = percent[0] + "%";

            slider2         = GameObject.Find("Canvas1").transform.Find("Slider2").GetComponent <Slider>();
            slider2.value   = percent[1];
            textAsset2      = GameObject.Find("Canvas1").transform.Find("result2").GetComponent <Text>();
            textAsset2.text = percent[1] + "%";

            slider3         = GameObject.Find("Canvas1").transform.Find("Slider3").GetComponent <Slider>();
            slider3.value   = percent[2];
            textAsset3      = GameObject.Find("Canvas1").transform.Find("result3").GetComponent <Text>();
            textAsset3.text = percent[2] + "%";

            slider4         = GameObject.Find("Canvas1").transform.Find("Slider4").GetComponent <Slider>();
            slider4.value   = percent[3];
            textAsset4      = GameObject.Find("Canvas1").transform.Find("result4").GetComponent <Text>();
            textAsset4.text = percent[3] + "%";

            slider5         = GameObject.Find("Canvas1").transform.Find("Slider5").GetComponent <Slider>();
            slider5.value   = percent[4];
            textAsset5      = GameObject.Find("Canvas1").transform.Find("result5").GetComponent <Text>();
            textAsset5.text = percent[4] + "%";

            slider6         = GameObject.Find("Canvas1").transform.Find("Slider6").GetComponent <Slider>();
            slider6.value   = percent[5];
            textAsset6      = GameObject.Find("Canvas1").transform.Find("result6").GetComponent <Text>();
            textAsset6.text = percent[5] + "%";

            slider7         = GameObject.Find("Canvas1").transform.Find("Slider7").GetComponent <Slider>();
            slider7.value   = percent[6];
            textAsset7      = GameObject.Find("Canvas1").transform.Find("result7").GetComponent <Text>();
            textAsset7.text = percent[6] + "%";

            textAsset      = GameObject.Find("Canvas1").transform.Find("result").GetComponent <Text>();
            textAsset.text = labels[result_label] + ":" + percent[result_label] + "%";
        }
    }
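The seven nearly identical slider/text updates above could also be written as a loop; a sketch, assuming the Canvas1 children keep the Slider1..Slider7 and result1..result7 names used above (this drops the per-slider field assignments that the original keeps):

            // Sketch: update all seven emotion sliders and their labels in one loop.
            var canvas = GameObject.Find("Canvas1").transform;
            for (int i = 0; i < 7; i++)
            {
                canvas.Find("Slider" + (i + 1)).GetComponent<Slider>().value = percent[i];
                canvas.Find("result" + (i + 1)).GetComponent<Text>().text    = percent[i] + "%";
            }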
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();
            //var files = options.Parse (args);
            if (dir == null)
            {
                dir = "/tmp";
                //Error ("Must specify a directory with -m to store the training data");
            }

            //if (files == null || files.Count == 0)
            //	Error ("No files were specified");

            //if (files.Count == 0)
            var files = new List <string> () {
                /*"Images/ship.jpeg",*/ "Images/cat.jpeg"
            };

            ModelFiles(dir);

            // Construct an in-memory graph from the serialized form.
            var graph = new TFGraph();
            // Load the serialized GraphDef from a file.
            var model = File.ReadAllBytes(modelFile);

            graph.Import(model, "");
            using (var session = new TFSession(graph)) {
                var labels = File.ReadAllLines(labelsFile);

                foreach (var file in files)
                {
                    const int   wanted_width    = 299;
                    const int   wanted_height   = 299;
                    const int   wanted_channels = 3;
                    const float input_mean      = 0.0f;
                    const float input_std       = 255.0f;

                    // Run inference on the image files
                    // For multiple images, session.Run() can be called in a loop (and
                    // concurrently). Alternatively, images can be batched since the model
                    // accepts batches of image data as input.
                    var tensor = CreateTensorFromImageFile(file, wanted_width, wanted_height, wanted_channels, input_mean, input_std);

                    var runner = session.GetRunner();
                    runner.AddInput(graph ["input"] [0], tensor).Fetch(graph ["InceptionV3/Predictions/Reshape_1"] [0]);
                    var output = runner.Run();
                    // output[0].Value() is a vector containing probabilities of
                    // labels for each image in the "batch". The batch size was 1.
                    // Find the most probably label index.

                    var result = output [0];
                    var rshape = result.Shape;
                    if (result.NumDims != 2 || rshape [0] != 1)
                    {
                        var shape = "";
                        foreach (var d in rshape)
                        {
                            shape += $"{d} ";
                        }
                        shape = shape.Trim();
                        Console.WriteLine($"Error: expected to produce a [1 N] shaped tensor where N is the number of labels, instead it produced one with shape [{shape}]");
                        Environment.Exit(1);
                    }

                    // You can get the data in two ways, as a multi-dimensional array, or arrays of arrays,
                    // code can be nicer to read with one or the other, pick it based on how you want to process
                    // it
                    bool jagged = true;

                    var         list = new List <Tuple <int, float> > ();
                    var         bestIdx = 0;
                    float       p = 0, best = 0;
                    const float kThreshold = 0.1f;
                    const int   maxCount   = 2;

                    if (jagged)
                    {
                        var probabilities = ((float [] [])result.GetValue(jagged: true)) [0];
                        for (int i = 0; i < probabilities.Length; i++)
                        {
                            if (list.Count >= maxCount)
                            {
                                break;
                            }

                            if (probabilities [i] > kThreshold)
                            {
                                list.Add(new Tuple <int, float> (i, probabilities[i]));
                                bestIdx = i;
                                best    = probabilities [i];
                            }
                        }
                    }
                    else
                    {
                        var val = (float [, ])result.GetValue(jagged: false);

                        // Result is [1,N], flatten array
                        for (int i = 0; i < val.GetLength(1); i++)
                        {
                            if (list.Count >= maxCount)
                            {
                                break;
                            }

                            if (val [0, i] > kThreshold)
                            {
                                list.Add(new Tuple <int, float> (i, val [0, i]));
                                bestIdx = i;
                                best    = val [0, i];
                            }
                        }
                    }
                    foreach (Tuple <int, float> t in list)
                    {
                        Console.WriteLine($"{file} best match: [{t.Item1}] {t.Item2 * 100.0}% {labels [t.Item1]}");
                    }
                }
            }
        }
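The CreateTensorFromImageFile overload used above takes the wanted size, mean and std. In the TensorFlowSharp inception samples this kind of helper builds a small graph that decodes, resizes and normalizes the image; the sketch below follows that pattern and is an illustration rather than the exact code used in this example.

        // Sketch: decode a JPEG, resize to (height, width), and normalize with (pixel - mean) / std.
        static TFTensor CreateTensorFromImageFile(string file, int width, int height, int channels, float mean, float std)
        {
            var contents = TFTensor.CreateString(File.ReadAllBytes(file));

            using (var g = new TFGraph())
            using (var s = new TFSession(g))
            {
                TFOutput input = g.Placeholder(TFDataType.String);
                TFOutput output = g.Div(
                    x: g.Sub(
                        x: g.ResizeBilinear(
                            images: g.ExpandDims(
                                input: g.Cast(g.DecodeJpeg(input, channels: channels), TFDataType.Float),
                                dim: g.Const(0)),
                            size: g.Const(new int[] { height, width })),
                        y: g.Const(mean)),
                    y: g.Const(std));
                return s.GetRunner().AddInput(input, contents).Run(output);
            }
        }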
    void IRCamThread()
    {
        var runner = session.GetRunner();

        irRunning = true;
        while (irRunning)
        {
            byte[] imgsBytes = new byte[1 + 5 * INPUT_SIZE * INPUT_SIZE];
            UpdateDWIRHandImages(dwImuPluginObj, imgsBytes);
            if (imgsBytes[0] == 110) // hand images not updated yet
            {
                Thread.Sleep(5);
                continue;
            }
            int imgNum = imgsBytes[0];

            int   pixInd    = 1;
            int   bestLabel = 3;
            float bestScore = 0.0f;
            for (int i = 0; i < imgNum; ++i)
            {
                // get image pixels
                float[] imgArray = new float[64 * 64];
                for (int ind = 0; ind < 64 * 64; ++ind)
                {
                    imgArray[ind] = imgsBytes[pixInd] / 255.0f;
                    pixInd++;
                }
                try
                {
                    // classify
                    runner = session.GetRunner();
                    TFTensor inputImg     = TFTensor.FromBuffer(new TFShape(new long[] { 1, 64, 64, 1 }), imgArray, 0, 64 * 64);
                    TFTensor inputDropout = new TFTensor(1.0f);
                    runner.AddInput(graph["data/X_placeholder"][0], inputImg);
                    runner.AddInput(graph["dropout"][0], inputDropout);
                    runner.Fetch(graph["softmax_linear/logits"][0]);
                    TFTensor[] output = runner.Run();
                    float[,] recurrent_tensor = output[0].GetValue() as float[, ];

                    // Dispose TFTensors
                    for (int ind = 0; ind < output.Length; ++ind)
                    {
                        output[ind].Dispose();
                    }
                    inputImg.Dispose();
                    inputDropout.Dispose();


                    // get best classification
                    int   predict = 0;
                    float prob    = recurrent_tensor[0, 0];
                    for (int k = 1; k < 4; ++k)
                    {
                        if (recurrent_tensor[0, k] > prob)
                        {
                            prob    = recurrent_tensor[0, k];
                            predict = k;
                        }
                    }
                    // classification != 3 means there is a valid gesture

                    /*
                     * 0: fist
                     * 1: finger
                     * 2: palm
                     * 3: other
                     */
                    if (predict != 3 && prob > bestScore)
                    {
                        bestScore = prob;
                        bestLabel = predict;
                    }
                }
                catch (Exception e)
                {
                    Debug.Log(e.ToString());
                }
            }

            // feed static gesture and get dynamic gesture
            byte[] outbuf = Encoding.ASCII.GetBytes(bestLabel.ToString());
            byte[] inbyte = new byte[2];
            UpdateHandGesture(dwImuPluginObj, outbuf, inbyte);
            gestureMsg = Encoding.ASCII.GetString(inbyte);
            UpdateGesture();
        }
        Debug.Log("DWPluginScripy_PC IRCamThread quit!");
    }
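The comment block inside the loop above documents the label mapping returned by the classifier. Giving those indices names makes the `predict != 3` check easier to read; a small sketch (the mapping is taken from the comment, the enum itself is hypothetical):

    // Hypothetical enum mirroring the comment above (0: fist, 1: finger, 2: palm, 3: other).
    enum HandGesture { Fist = 0, Finger = 1, Palm = 2, Other = 3 }

    // The validity check could then read:
    //     if (predict != (int)HandGesture.Other && prob > bestScore) { ... }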
Example #18
0
        public Int32 识别方法(out double OU)
        {
            using (var session = new TFSession(graph))
            {
                var tensor = CreateTensorFromImageFile(临时图片路径);

                var runner = session.GetRunner();

                runner.AddInput(graph["x_input"][0], tensor).Fetch(graph["softmax_linear/softmax_linear"][0]);

                var output = runner.Run();

                var result = output[0];

                var rshape = result.Shape;

                if (result.NumDims != 2 || rshape[0] != 1)
                {
                    var shape = "";
                    foreach (var d in rshape)
                    {
                        shape += $"{d} ";
                    }
                    shape = shape.Trim();
                    Console.WriteLine($"Error: expected to produce a [1 N] shaped tensor where N is the number of labels, instead it produced one with shape [{shape}]");
                    Environment.Exit(1);
                }

                bool jagged = true;

                var bestIdx = 0;

                float  p    = 0;
                double best = 0;
                if (jagged)
                {
                    var      probabilities = ((float[][])result.GetValue(jagged: true))[0];
                    double[] d             = floatTodouble(probabilities);
                    double[] retResult     = Softmax(d);

                    for (int i = 0; i < retResult.Length; i++)
                    {
                        if (retResult[i] > best)
                        {
                            bestIdx = i;
                            //best = probabilities[i];
                            best = retResult[i];
                        }
                    }
                }
                else
                {
                    var val = (float[, ])result.GetValue(jagged: false);
                    for (int i = 0; i < val.GetLength(1); i++)
                    {
                        if (val[0, i] > best)
                        {
                            bestIdx = i;
                            best    = val[0, i];
                        }
                    }
                }
                OU = best;
                return(bestIdx);
            }
        }
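The method above relies on floatTodouble and Softmax helpers that are not shown. Sketches of plausible implementations, assuming a standard numerically stable softmax over the fetched values:

        // Hypothetical helpers: convert float probabilities to double and apply a stable softmax.
        static double[] floatTodouble(float[] values)
        {
            var result = new double[values.Length];
            for (int i = 0; i < values.Length; i++) { result[i] = values[i]; }
            return result;
        }

        static double[] Softmax(double[] logits)
        {
            double max = double.MinValue;
            foreach (var v in logits) { if (v > max) { max = v; } }   // subtract the max for stability

            var exps = new double[logits.Length];
            double sum = 0;
            for (int i = 0; i < logits.Length; i++) { exps[i] = Math.Exp(logits[i] - max); sum += exps[i]; }

            for (int i = 0; i < exps.Length; i++) { exps[i] /= sum; }
            return exps;
        }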
Example #19
0
    /// Uses the stored information to run the tensorflow graph and generate
    /// the actions.
    public void DecideAction(Dictionary <Agent, AgentInfo> agentInfo)
    {
#if ENABLE_TENSORFLOW
        if (coord != null)
        {
            coord.GiveBrainInfo(brain, agentInfo);
        }
        int          currentBatchSize = agentInfo.Count();
        List <Agent> agentList        = agentInfo.Keys.ToList();
        if (currentBatchSize == 0)
        {
            return;
        }


        // Create the state tensor
        if (hasState)
        {
            int stateLength = 1;
            if (brain.brainParameters.vectorObservationSpaceType == SpaceType.continuous)
            {
                stateLength = brain.brainParameters.vectorObservationSize;
            }
            inputState = new float[currentBatchSize, stateLength *brain.brainParameters.numStackedVectorObservations];

            var i = 0;
            foreach (Agent agent in agentList)
            {
                List <float> state_list = agentInfo[agent].stackedVectorObservation;
                for (int j = 0; j < stateLength * brain.brainParameters.numStackedVectorObservations; j++)
                {
                    inputState[i, j] = state_list[j];
                }
                i++;
            }
        }

        // Create the previous action tensor
        if (hasPrevAction)
        {
            inputPrevAction = new int[currentBatchSize];
            var i = 0;
            foreach (Agent agent in agentList)
            {
                float[] action_list = agentInfo[agent].storedVectorActions;
                inputPrevAction[i] = Mathf.FloorToInt(action_list[0]);
                i++;
            }
        }


        observationMatrixList.Clear();
        for (int observationIndex = 0; observationIndex < brain.brainParameters.cameraResolutions.Count(); observationIndex++)
        {
            texturesHolder.Clear();
            foreach (Agent agent in agentList)
            {
                texturesHolder.Add(agentInfo[agent].visualObservations[observationIndex]);
            }
            observationMatrixList.Add(
                BatchVisualObservations(texturesHolder, brain.brainParameters.cameraResolutions[observationIndex].blackAndWhite));
        }

        // Create the recurrent tensor
        if (hasRecurrent)
        {
            // Need to have variable memory size
            inputOldMemories = new float[currentBatchSize, memorySize];
            var i = 0;
            foreach (Agent agent in agentList)
            {
                float[] m = agentInfo[agent].memories.ToArray();
                for (int j = 0; j < m.Count(); j++)
                {
                    inputOldMemories[i, j] = m[j];
                }
                i++;
            }
        }


        var runner = session.GetRunner();
        try
        {
            runner.Fetch(graph[graphScope + ActionPlaceholderName][0]);
        }
        catch
        {
            throw new UnityAgentsException(string.Format(@"The node {0} could not be found. Please make sure the graphScope {1} is correct",
                                                         graphScope + ActionPlaceholderName, graphScope));
        }

        if (hasBatchSize)
        {
            runner.AddInput(graph[graphScope + BatchSizePlaceholderName][0], new int[] { currentBatchSize });
        }

        foreach (TensorFlowAgentPlaceholder placeholder in graphPlaceholders)
        {
            try
            {
                if (placeholder.valueType == TensorFlowAgentPlaceholder.tensorType.FloatingPoint)
                {
                    runner.AddInput(graph[graphScope + placeholder.name][0], new float[] { Random.Range(placeholder.minValue, placeholder.maxValue) });
                }
                else if (placeholder.valueType == TensorFlowAgentPlaceholder.tensorType.Integer)
                {
                    runner.AddInput(graph[graphScope + placeholder.name][0], new int[] { Random.Range((int)placeholder.minValue, (int)placeholder.maxValue + 1) });
                }
            }
            catch
            {
                throw new UnityAgentsException(string.Format(@"One of the TensorFlow placeholders could not be found.
                In brain {0}, there is no {1} placeholder named {2}.",
                                                             brain.gameObject.name, placeholder.valueType.ToString(), graphScope + placeholder.name));
            }
        }

        // Create the state tensor
        if (hasState)
        {
            if (brain.brainParameters.vectorObservationSpaceType == SpaceType.discrete)
            {
                var discreteInputState = new int[currentBatchSize, 1];
                for (int i = 0; i < currentBatchSize; i++)
                {
                    discreteInputState[i, 0] = (int)inputState[i, 0];
                }
                runner.AddInput(graph[graphScope + VectorObservationPlacholderName][0], discreteInputState);
            }
            else
            {
                runner.AddInput(graph[graphScope + VectorObservationPlacholderName][0], inputState);
            }
        }

        // Create the previous action tensor
        if (hasPrevAction)
        {
            runner.AddInput(graph[graphScope + PreviousActionPlaceholderName][0], inputPrevAction);
        }

        // Create the observation tensors
        for (int obs_number = 0; obs_number < brain.brainParameters.cameraResolutions.Length; obs_number++)
        {
            runner.AddInput(graph[graphScope + VisualObservationPlaceholderName[obs_number]][0], observationMatrixList[obs_number]);
        }

        if (hasRecurrent)
        {
            runner.AddInput(graph[graphScope + "sequence_length"][0], 1);
            runner.AddInput(graph[graphScope + RecurrentInPlaceholderName][0], inputOldMemories);
            runner.Fetch(graph[graphScope + RecurrentOutPlaceholderName][0]);
        }

        TFTensor[] networkOutput;
        try
        {
            networkOutput = runner.Run();
        }
        catch (TFException e)
        {
            string errorMessage = e.Message;
            try
            {
                errorMessage = string.Format(@"The tensorflow graph needs an input for {0} of type {1}",
                                             e.Message.Split(new string[] { "Node: " }, 0)[1].Split('=')[0],
                                             e.Message.Split(new string[] { "dtype=" }, 0)[1].Split(',')[0]);
            }
            finally
            {
                throw new UnityAgentsException(errorMessage);
            }
        }

        // Read back the recurrent tensor and update the agents' memories
        if (hasRecurrent)
        {
            float[,] recurrent_tensor = networkOutput[1].GetValue() as float[, ];

            var i = 0;
            foreach (Agent agent in agentList)
            {
                var m = new float[memorySize];
                for (int j = 0; j < memorySize; j++)
                {
                    m[j] = recurrent_tensor[i, j];
                }
                agent.UpdateMemoriesAction(m.ToList());
                i++;
            }
        }

        if (brain.brainParameters.vectorActionSpaceType == SpaceType.continuous)
        {
            var output = networkOutput[0].GetValue() as float[, ];
            var i      = 0;
            foreach (Agent agent in agentList)
            {
                var a = new float[brain.brainParameters.vectorActionSize];
                for (int j = 0; j < brain.brainParameters.vectorActionSize; j++)
                {
                    a[j] = output[i, j];
                }
                agent.UpdateVectorAction(a);
                i++;
            }
        }
        else if (brain.brainParameters.vectorActionSpaceType == SpaceType.discrete)
        {
            long[,] output = networkOutput[0].GetValue() as long[, ];
            var i = 0;
            foreach (Agent agent in agentList)
            {
                var a = new float[1] {
                    (float)(output[i, 0])
                };
                agent.UpdateVectorAction(a);
                i++;
            }
        }
#else
        if (agentInfo.Count > 0)
        {
            throw new UnityAgentsException(string.Format(@"The brain {0} was set to Internal but the Tensorflow 
                        library is not present in the Unity project.",
                                                         brain.gameObject.name));
        }
#endif
    }
        private async Task DoWork(CancellationToken stoppingToken)
        {
            var images       = JsonConvert.DeserializeObject <ImageMetadata[]>(File.ReadAllText("assets/images/images.json"));
            var rand         = new Random();
            var graph        = new TFGraph();
            var model        = File.ReadAllBytes("assets/model.pb");
            var labels       = File.ReadAllLines("assets/labels.txt");
            var hostname     = Guid.NewGuid().ToString();
            var jsonSettings = new JsonSerializerSettings
            {
                ContractResolver = new CamelCasePropertyNamesContractResolver()
            };
            var httpClient = new HttpClient();

            graph.Import(model);

            var sw = new Stopwatch();

            while (!stoppingToken.IsCancellationRequested)
            {
                using (var session = new TFSession(graph))
                {
                    var image = images[rand.Next(images.Length)];
                    sw.Reset();
                    sw.Start();
                    var tensor = ImageUtil.CreateTensorFromImageFile($"assets/images/{image.ImageId}.{image.EncodingFormat}");
                    var runner = session.GetRunner();
                    runner.AddInput(graph["Placeholder"][0], tensor).Fetch(graph["loss"][0]);
                    var output     = runner.Run();
                    var result     = output[0];
                    var apiBaseUrl = Environment.GetEnvironmentVariable("API_BASE_URL") ?? "http://localhost:5000";

                    var probabilities      = ((float[][])result.GetValue(jagged: true))[0];
                    var highestProbability = probabilities
                                             .Select((p, i) => (Probability: p, Index: i))
                                             .OrderByDescending(p => p.Probability)
                                             .First();
                    var bestResult = (Label : labels[highestProbability.Index], Probability : highestProbability.Probability);
                    Thread.Sleep(Convert.ToInt32(sw.ElapsedMilliseconds) + 1000);
                    sw.Stop();

                    var classificationResult = new ClassificationResult
                    {
                        Image       = image,
                        Label       = bestResult.Label,
                        Probability = bestResult.Probability,
                        WorkerId    = hostname,
                        TimeTaken   = sw.ElapsedMilliseconds
                    };

                    Console.WriteLine(JsonConvert.SerializeObject(classificationResult, jsonSettings));
                    var content = new StringContent(
                        JsonConvert.SerializeObject(classificationResult, jsonSettings),
                        Encoding.UTF8,
                        "application/json"
                        );
                    try
                    {
                        await httpClient.PostAsync($"{apiBaseUrl}/api/imageprocessed", content);
                    }
                    catch (Exception e) {
                        Console.WriteLine(e.ToString());
                    }

                    tensor.Dispose();
                    foreach (var o in output)
                    {
                        o.Dispose();
                    }
                }
            }
        }
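DoWork above deserializes assets/images/images.json into ImageMetadata[] and posts a ClassificationResult; those types are defined elsewhere in the project, so the property lists below are assumptions inferred from how the objects are used in the loop.

        // Hypothetical DTO shapes matching the usage above.
        public class ImageMetadata
        {
            public string ImageId { get; set; }
            public string EncodingFormat { get; set; }
        }

        public class ClassificationResult
        {
            public ImageMetadata Image { get; set; }
            public string Label { get; set; }
            public float Probability { get; set; }
            public string WorkerId { get; set; }
            public long TimeTaken { get; set; }
        }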
Example #21
0
        public FlexNet3D(string modelDir, int3 boxDimensions, int gpuID = 0, int nThreads = 1, bool forTraining = true, int batchSize = 128, int bottleneckWidth = 2, int layerWidth = 64, int nlayers = 4)
        {
            BoxDimensions   = boxDimensions;
            ForTraining     = forTraining;
            BatchSize       = batchSize;
            BottleneckWidth = bottleneckWidth;
            NWeights0       = layerWidth;
            NLayers         = nlayers;
            ModelDir        = modelDir;
            MaxThreads      = nThreads;

            TFSessionOptions SessionOptions = TFHelper.CreateOptions();
            TFSession        Dummy          = new TFSession(new TFGraph(), SessionOptions);

            Session = TFHelper.FromSavedModel(SessionOptions, null, ModelDir, new[] { forTraining ? "train" : "serve" }, new TFGraph(), $"/device:GPU:{gpuID}");
            Graph   = Session.Graph;

            NodeInputSource       = Graph["volume_source"][0];
            NodeInputTarget       = Graph["volume_target"][0];
            NodeInputWeightSource = Graph["volume_weight_source"][0];
            NodeInputWeightTarget = Graph["volume_weight_target"][0];
            NodeDropoutRate       = Graph["training_dropout_rate"][0];
            if (forTraining)
            {
                NodeLearningRate      = Graph["training_learning_rate"][0];
                NodeOrthogonalityRate = Graph["training_orthogonality"][0];
                NodeOpTrain           = Graph["train_momentum"][0];
                NodeOutputLoss        = Graph["l2_loss"][0];
                NodeOutputLossKL      = Graph["kl_loss"][0];
                NodeBottleneck        = Graph["bottleneck"][0];
            }

            NodeCode = Graph["volume_code"][0];

            NodeOutputPredicted = Graph["volume_predict"][0];

            NodeWeights0 = Graph["encoder_0/weights_0"][0];
            NodeWeights1 = Graph[$"decoder_{nlayers - 1}/weights_{nlayers - 1}"][0];
            if (forTraining)
            {
                NodeWeights0Assign = Graph["encoder_0/assign_layer0"][0];
                NodeWeights0Input  = Graph["encoder_0/assign_layer0_values"][0];

                NodeWeights1Assign = Graph[$"decoder_{nlayers - 1}/assign_layer0"][0];
                NodeWeights1Input  = Graph[$"decoder_{nlayers - 1}/assign_layer0_values"][0];
            }

            TensorSource = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, (BoxDimensions.X / 2 + 1), BoxDimensions.Y, BoxDimensions.Z, 2),
                                                                           new float[BatchSize * BoxDimensions.ElementsFFT() * 2],
                                                                           0,
                                                                           BatchSize * (int)BoxDimensions.ElementsFFT() * 2),
                                                  nThreads);

            TensorTarget = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, (BoxDimensions.X / 2 + 1), BoxDimensions.Y, BoxDimensions.Z, 2),
                                                                           new float[BatchSize * BoxDimensions.ElementsFFT() * 2],
                                                                           0,
                                                                           BatchSize * (int)BoxDimensions.ElementsFFT() * 2),
                                                  nThreads);

            TensorWeightSource = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, (BoxDimensions.X / 2 + 1), BoxDimensions.Y, BoxDimensions.Z, 1),
                                                                                 new float[BatchSize * BoxDimensions.ElementsFFT()],
                                                                                 0,
                                                                                 BatchSize * (int)BoxDimensions.ElementsFFT()),
                                                        nThreads);

            TensorWeightTarget = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, (BoxDimensions.X / 2 + 1), BoxDimensions.Y, BoxDimensions.Z, 1),
                                                                                 new float[BatchSize * BoxDimensions.ElementsFFT()],
                                                                                 0,
                                                                                 BatchSize * (int)BoxDimensions.ElementsFFT()),
                                                        nThreads);

            TensorCode = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, BottleneckWidth),
                                                                         new float[BatchSize * BottleneckWidth],
                                                                         0,
                                                                         BatchSize * BottleneckWidth),
                                                nThreads);

            TensorLearningRate = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(1),
                                                                                 new float[1],
                                                                                 0,
                                                                                 1),
                                                        nThreads);

            TensorDropoutRate = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(1),
                                                                                new float[1],
                                                                                0,
                                                                                1),
                                                       nThreads);

            TensorOrthogonalityRate = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(1),
                                                                                      new float[1],
                                                                                      0,
                                                                                      1),
                                                             nThreads);

            ResultPredicted  = Helper.ArrayOfFunction(i => new float[BatchSize * BoxDimensions.ElementsFFT() * 2], nThreads);
            ResultBottleneck = Helper.ArrayOfFunction(i => new float[BatchSize * BottleneckWidth], nThreads);
            ResultLoss       = Helper.ArrayOfFunction(i => new float[1], nThreads);
            ResultLossKL     = Helper.ArrayOfFunction(i => new float[1], nThreads);

            RetrievedWeights = new float[boxDimensions.ElementsFFT() * 2 * NWeights0];

            //if (!ForTraining)
            RunnerPrediction = Helper.ArrayOfFunction(i => Session.GetRunner().
                                                      AddInput(NodeCode, TensorCode[i]).
                                                      AddInput(NodeDropoutRate, TensorDropoutRate[i]).
                                                      Fetch(NodeOutputPredicted),
                                                      nThreads);
            //else
            RunnerTraining = Helper.ArrayOfFunction(i => Session.GetRunner().
                                                    AddInput(NodeInputSource, TensorSource[i]).
                                                    AddInput(NodeInputTarget, TensorTarget[i]).
                                                    AddInput(NodeInputWeightSource, TensorWeightSource[i]).
                                                    AddInput(NodeInputWeightTarget, TensorWeightTarget[i]).
                                                    AddInput(NodeDropoutRate, TensorDropoutRate[i]).
                                                    AddInput(NodeLearningRate, TensorLearningRate[i]).
                                                    AddInput(NodeOrthogonalityRate, TensorOrthogonalityRate[i]).
                                                    Fetch(NodeOutputPredicted, NodeOutputLoss, NodeOutputLossKL, NodeBottleneck, NodeOpTrain),
                                                    nThreads);

            RunnerEncode = Helper.ArrayOfFunction(i => Session.GetRunner().
                                                  AddInput(NodeInputSource, TensorSource[i]).
                                                  AddInput(NodeInputWeightSource, TensorWeightSource[i]).
                                                  AddInput(NodeDropoutRate, TensorDropoutRate[i]).
                                                  Fetch(NodeBottleneck),
                                                  nThreads);

            RunnerRetrieveWeights0 = Session.GetRunner().Fetch(NodeWeights0);
            RunnerRetrieveWeights1 = Session.GetRunner().Fetch(NodeWeights1);

            if (ForTraining)
            {
                TensorWeights0 = TFTensor.FromBuffer(new TFShape(NWeights0, BoxDimensions.ElementsFFT() * 2),
                                                     new float[BoxDimensions.ElementsFFT() * 2 * NWeights0],
                                                     0,
                                                     (int)BoxDimensions.ElementsFFT() * 2 * NWeights0);

                RunnerAssignWeights0 = Session.GetRunner().AddInput(NodeWeights0Input, TensorWeights0).
                                       Fetch(NodeWeights0Assign);
                RunnerAssignWeights1 = Session.GetRunner().AddInput(NodeWeights1Input, TensorWeights0).
                                       Fetch(NodeWeights1Assign);
            }

            // Run prediction or training for one batch to claim all the memory needed
            float[] InitDecoded;
            float[] InitBottleneck;
            float[] InitLoss, InitLossKL;
            if (!ForTraining)
            {
                RandomNormal RandN = new RandomNormal(123);
                Predict(Helper.ArrayOfFunction(i => RandN.NextSingle(0, 1), BottleneckWidth * BatchSize),
                        0,
                        out InitDecoded);
            }
            else
            {
                RandomNormal RandN = new RandomNormal();

                Encode(Helper.ArrayOfFunction(i => RandN.NextSingle(0, 1), BatchSize * (int)BoxDimensions.ElementsFFT() * 2),
                       Helper.ArrayOfFunction(i => 1f, BatchSize * (int)BoxDimensions.ElementsFFT()),
                       0,
                       out InitBottleneck);

                Train(Helper.ArrayOfFunction(i => RandN.NextSingle(0, 1), BatchSize * (int)BoxDimensions.ElementsFFT() * 2),
                      Helper.ArrayOfFunction(i => RandN.NextSingle(0, 1), BatchSize * (int)BoxDimensions.ElementsFFT() * 2),
                      Helper.ArrayOfFunction(i => 1f, BatchSize * (int)BoxDimensions.ElementsFFT()),
                      Helper.ArrayOfFunction(i => 1f, BatchSize * (int)BoxDimensions.ElementsFFT()),
                      0.5f,
                      1e-10f,
                      1e-5f,
                      0,
                      out InitDecoded,
                      out InitBottleneck,
                      out InitLoss,
                      out InitLossKL);
            }
        }
Example #22
0
        private void PrintVariables(Dictionary <Tensor, Array> feed_dict, TFSession session)
        {
            string[] ops =
            {
                //"SGD/grad/dense_1/dense_1/kernel/var",
                //"SGD/grad/dense_2/dense_2/kernel/var",
                //"SGD/grad/dense_2/dense_2/bias/var",
                //"loss/dense_1_loss/y_true",
                //"loss/dense_1_loss/y_pred",
                //"loss/dense_1_loss/weights",
                //"iterations/var",
                //"lr/var",
                //"lr_t",
                //"p_t",
                //"metrics/binary_accuracy/Round0",
                //"metrics/binary_accuracy/Cast0",
                //"metrics/binary_accuracy/Mean0",
                //"metrics/binary_accuracy/Equal0",
                //"metrics/binary_accuracy/value",
                //"metrics/score_array/mean"
                //"beta_1/var",
                //"beta_2/var",
                //"decay/var",
                //"adam/grad/dense_1/dense_1/kernel/var",
                //"dense_1/variance_scaling/1/scaled",
                //"dense_1/dense_1/kernel/var",
                //"dense_1/call/dot",
                //"dense_1/call/Sigmoid0",
            };

            foreach (var op in ops)
            {
                try
                {
                    var debugRunner = session.GetRunner();
                    foreach (KeyValuePair <Tensor, Array> pair in feed_dict)
                    {
                        TensorFlowTensor t = K.In(pair.Key);
                        debugRunner.AddInput(t.output, pair.Value);
                    }

                    Console.WriteLine(op);
                    debugRunner.Fetch(op);

                    var v = debugRunner.Run();

                    object obj = v[0].GetValue();

                    if (obj is float[, ])
                    {
                        Console.WriteLine((obj as float[, ]).ToCSharp());
                    }
                    else if (obj is float[])
                    {
                        Console.WriteLine((obj as float[]).ToCSharp());
                    }
                    else if (obj is bool[, ])
                    {
                        Console.WriteLine((obj as bool[, ]).ToCSharp());
                    }
                    else if (obj is bool[])
                    {
                        Console.WriteLine((obj as bool[]).ToCSharp());
                    }
                    else if (obj is sbyte[, ])
                    {
                        Console.WriteLine((obj as sbyte[, ]).ToCSharp());
                    }
                    else if (obj is sbyte[])
                    {
                        Console.WriteLine((obj as sbyte[]).ToCSharp());
                    }
                    else
                    {
                        Console.WriteLine(obj);
                    }
                }
                catch
                {
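                    // Ignore failures, e.g. ops that are not present in the current graph.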
                }
            }
        }
Example #23
0
    /// Uses the stored information to run the TensorFlow graph and generate
    /// the actions.
    public void DecideAction()
    {
#if ENABLE_TENSORFLOW
        if (currentBatchSize == 0)
        {
            return;
        }

        var runner = session.GetRunner();
        runner.Fetch(graph[graphScope + ActionPlaceholderName][0]);

        if (hasBatchSize)
        {
            runner.AddInput(graph[graphScope + BatchSizePlaceholderName][0], new int[] { currentBatchSize });
        }

        foreach (TensorFlowAgentPlaceholder placeholder in graphPlaceholders)
        {
            if (placeholder.valueType == TensorFlowAgentPlaceholder.tensorType.FloatingPoint)
            {
                runner.AddInput(graph[graphScope + placeholder.name][0], new float[] { Random.Range(placeholder.minValue, placeholder.maxValue) });
            }
            else if (placeholder.valueType == TensorFlowAgentPlaceholder.tensorType.Integer)
            {
                runner.AddInput(graph[graphScope + placeholder.name][0], new int[] { Random.Range((int)placeholder.minValue, (int)placeholder.maxValue + 1) });
            }
        }

        // Create the state tensor
        if (hasState)
        {
            runner.AddInput(graph[graphScope + StatePlacholderName][0], inputState);
        }

        // Create the observation tensors
        for (int obs_number = 0; obs_number < brain.brainParameters.cameraResolutions.Length; obs_number++)
        {
            runner.AddInput(graph[graphScope + ObservationPlaceholderName[obs_number]][0], observationMatrixList[obs_number]);
        }


        // Create the recurrent tensor
        if (hasRecurrent)
        {
            Dictionary <int, float[]> new_memories = new Dictionary <int, float[]>();

            runner.AddInput(graph[graphScope + RecurrentInPlaceholderName][0], inputOldMemories);
            runner.Fetch(graph[graphScope + RecurrentOutPlaceholderName][0]);
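            // Each call to runner.Run() executes the graph; index 1 is the recurrent
            // output because it was the second Fetch added (the action fetch is index 0).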
            float[,] recurrent_tensor = runner.Run()[1].GetValue() as float[, ];

            int i = 0;
            foreach (int k in agentKeys)
            {
                float[] m = new float[brain.brainParameters.memorySize];
                for (int j = 0; j < brain.brainParameters.memorySize; j++)
                {
                    m[j] = recurrent_tensor[i, j];
                }
                new_memories.Add(k, m);
                i++;
            }

            brain.SendMemories(new_memories);
        }

        Dictionary <int, float[]> actions = new Dictionary <int, float[]>();

        if (brain.brainParameters.actionSpaceType == StateType.continuous)
        {
            float[,] output = runner.Run()[0].GetValue() as float[, ];
            int i = 0;
            foreach (int k in agentKeys)
            {
                float[] a = new float[brain.brainParameters.actionSize];
                for (int j = 0; j < brain.brainParameters.actionSize; j++)
                {
                    a[j] = output[i, j];
                }
                actions.Add(k, a);
                i++;
            }
        }
        else if (brain.brainParameters.actionSpaceType == StateType.discrete)
        {
            long[,] output = runner.Run()[0].GetValue() as long[, ];
            int i = 0;
            foreach (int k in agentKeys)
            {
                float[] a = new float[1] {
                    (float)(output[i, 0])
                };
                actions.Add(k, a);
                i++;
            }
        }

        brain.SendActions(actions);
#endif
    }
Example #24
0
        public async Task <SentimentResult> Predict(string message)
        {
            if (string.IsNullOrWhiteSpace(message))
            {
                return(new SentimentResult(message, 0, 0, 1, TimeSpan.Zero));
            }

            try
            {
                var w = new Stopwatch();
                w.Start();

                //todo: clean message more!
                var words = message.SplitSpan(' ').Select(a => a.ToString()).ToArray();

                var graph = await _graph;

                using var session = new TFSession(graph);
                var runner = session.GetRunner();

                //Create input tensor (1 sentence, N words, 300 word vector dimensions)
                var input = new float[1, words.Length, 300];

                var tasks = words.Select(_wordVectors.Vector).ToList();

                //Copy in word vectors element by element
                var wordIndex = 0;
                foreach (var wordVector in tasks)
                {
                    var wv = await wordVector;
                    if (wv != null)
                    {
                        for (var i = 0; i < 300; i++)
                        {
                            input[0, wordIndex, i] = wv[i];
                        }
                    }

                    wordIndex++;
                }

                //Set tensor as input to the graph
                runner.AddInput(graph[_sentimentModelInput][0], input);

                //Tell the runner what result we want
                runner.Fetch(graph[_sentimentModelOutput][0]);

                //Execute the graph
                var results = runner.Run();

                //Fetch the result (we only asked for one)
                var result = (float[, ])results.Single().GetValue();

                return(new SentimentResult(message, result[0, 0], result[0, 1], result[0, 2], w.Elapsed));
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
                throw;
            }
        }
Example #25
0
        public IEnumerable <FloatNetworkResult> Run(dynamic inputParameters, IEnumerable <string> outputLayerValues, Func <TFSession, IEnumerable <FloatNetworkResult> > customRunning = null)
        {
            using (var session = new TFSession(Graph))
            {
                var runner = session.GetRunner();

                void SetPlaceholderDouble(Layer inputLayer, object value)
                {
                    runner.AddInput(inputLayer.Output, new TFTensor((double[, , , ])value));
                }

                void SetPlaceholderFloat(Layer inputLayer, object value)
                {
                    runner.AddInput(inputLayer.Output, new TFTensor((float[, , , ])value));
                }

                Action <Layer, object> setPlaceholder;
                switch (Precision)
                {
                case PrecisionType.Float:
                    setPlaceholder = SetPlaceholderFloat;
                    break;

                case PrecisionType.Double:
                    setPlaceholder = SetPlaceholderDouble;
                    break;

                default:
                    throw new ArgumentOutOfRangeException();
                }
                var inputParameterValues = ((object)inputParameters)
                                           .GetType()
                                           .GetProperties()
                                           .ToDictionary(p => p.Name, p => p.GetValue(inputParameters));
                foreach (var item in inputParameterValues)
                {
                    var input = _layers[item.Key];

                    setPlaceholder(input, item.Value);
                }

                if (customRunning != null)
                {
                    return(customRunning(session));
                }
                else
                {
                    var outputs = new List <TFOutput>();

                    foreach (var outputLayerValue in outputLayerValues)
                    {
                        if (!_layers.ContainsKey(outputLayerValue))
                        {
                            throw new Exception($"Can't recover values from the output layer {outputLayerValue}");
                        }
                        var layer = _layers[outputLayerValue];

                        outputs.Add(layer.Output);
                    }
                    var run = runner.Fetch(outputs.ToArray()).Run();

                    var floatNetworks = new List <FloatNetworkResult>();
                    for (var i = 0; i < run.Length; i++)
                    {
                        var value = run[i];
                        floatNetworks.Add(new FloatNetworkResult(value.NumDims, value.GetValue(), outputLayerValues.ElementAt(i)));
                    }

                    return(floatNetworks);
                }
            }
        }
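
        // Usage sketch (not part of the original example; "network", "pixels" and the
        // layer names are assumptions). Run reads each property of the anonymous object
        // via reflection and feeds it to the layer registered under the same name, so
        // each property name must match a key in _layers:
        //
        //   float[, , , ] pixels = new float[1, 224, 224, 3];          // one RGB image
        //   IEnumerable<FloatNetworkResult> results =
        //       network.Run(new { input = pixels }, new[] { "output" });
        //   foreach (var r in results)
        //       Console.WriteLine(r);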
Example #26
0
        // 转换方法 ("conversion method"): reads the image file into a scalar string
        // tensor and runs it through graph1 to produce the normalized image tensor.
        public TFTensor 转换方法()
        {
            //if (b)
            //{
            //    contents = File.ReadAllBytes(file);

            //    // DecodeJpeg uses a scalar String-valued tensor as input.

            //    tensor = TFTensor.CreateString(contents);
            //    b = false;
            //}



            contents = File.ReadAllBytes(file);

            //// DecodeJpeg uses a scalar String-valued tensor as input.

            tensor = TFTensor.CreateString(contents);



            //contents  = null;

            // Construct a graph to normalize the image

            // Execute that graph to normalize this one image
            //using (var session = new TFSession(graph1))
            //{
            //    var normalized = session.Run(
            //        inputs: new[] { input },
            //        inputValues: new[] { tensor },
            //        outputs: new[] { output });
            //    //tensor = null;
            //    return normalized[0];
            //}



            //using (var session = new TFSession(graph1))
            //{
            //var session = new TFSession(graph1);
            //    var runner = session.GetRunner();

            //    runner.AddInput(input, tensor);
            //    runner.Fetch(output);
            //    normalized = runner.Run();

            //    //session.CloseSession();
            //    session.DeleteSession();
            //    runner = null;
            //    //return normalized[0];

            ////}
            //return 1;



            using (var session = new TFSession(graph1))
            {
                try
                {
                    var runner = session.GetRunner();

                    runner.AddInput(input, tensor);
                    runner.Fetch(output);
                    normalized = runner.Run();

                    return(normalized[0]);
                }
                catch (Exception e)
                {
                    //Console.WriteLine(e);
                    throw;
                }
                finally
                {
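                    // Redundant: the enclosing using block also disposes the session.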
                    session.Dispose();
                    //session.CloseSession();
                    //session.DeleteSession();
                }
            }

            //normalized = session.Run(
            //        inputs: new[] { input },
            //        inputValues: new[] { tensor },
            //        outputs: new[] { output });

            //tensor = null;

            //return normalized[0];
        }
Example #27
0
        public static void Main(string[] args)
        {
            // Construct an in-memory graph from the serialized form.
            var graph = new TFGraph();
            // Load the serialized GraphDef from a file.
            var model = File.ReadAllBytes(modelFile);
            var file  = "test.jpg";

            graph.Import(model, "");
            using (var session = new TFSession(graph))
            {
                var labels = File.ReadAllLines(labelsFile);
                // For multiple images, session.Run() can be called in a loop (and
                // concurrently). Alternatively, images can be batched since the model
                // accepts batches of image data as input.
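                // Batched alternative (sketch only, not part of the original example;
                // n/h/w/c and "pixels" are assumptions): copy several preprocessed
                // images into one float[n, h, w, c] buffer and run the session once:
                //
                //   var batch = TFTensor.FromBuffer(new TFShape(n, h, w, c), pixels, 0, pixels.Length);
                //   var probs = session.GetRunner()
                //                      .AddInput(graph["conv2d_1_input"][0], batch)
                //                      .Fetch(graph["activation_5/Sigmoid"][0])
                //                      .Run();
                //   // probs[0] is then shaped [n, N] with one probability row per image.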
                var tensor = ImageUtil.CreateTensorFromImageFile(file);
                var runner = session.GetRunner();
                runner.AddInput(graph["conv2d_1_input"][0], tensor).Fetch(graph["activation_5/Sigmoid"][0]);
                var output = runner.Run();
                // output[0].GetValue() is a vector containing probabilities of
                // labels for each image in the "batch". The batch size was 1.
                // Find the most probable label index.

                var result = output[0];
                var rshape = result.Shape;
                if (result.NumDims != 2 || rshape[0] != 1)
                {
                    var shape = "";
                    foreach (var d in rshape)
                    {
                        shape += $"{d} ";
                    }
                    shape = shape.Trim();
                    Console.WriteLine($"Error: expected to produce a [1 N] shaped tensor where N is the number of labels, instead it produced one with shape [{shape}]");
                    Environment.Exit(1);
                }

                // You can get the data in two ways: as a multi-dimensional array or as
                // arrays of arrays. Code can be nicer to read with one or the other;
                // pick based on how you want to process it.
                bool jagged = true;

                var   bestIdx = 0;
                float p = 0, best = 0;

                if (jagged)
                {
                    var probabilities = ((float[][])result.GetValue(jagged: true))[0];
                    for (int i = 0; i < probabilities.Length; i++)
                    {
                        if (probabilities[i] > best)
                        {
                            bestIdx = i;
                            best    = probabilities[i];
                        }
                    }
                }
                else
                {
                    var val = (float[, ])result.GetValue(jagged: false);

                    // Result is [1,N], flatten array
                    for (int i = 0; i < val.GetLength(1); i++)
                    {
                        if (val[0, i] > best)
                        {
                            bestIdx = i;
                            best    = val[0, i];
                        }
                    }
                }

                Console.WriteLine($"{file} best match: [{bestIdx}] {best * 100.0}% {labels[bestIdx]}");
                Console.ReadLine();
            }
        }
Example #28
0
        public int ExecuteGraph(IEnumerable <Tensor> inputs_it, IEnumerable <Tensor> outputs_it)
        {
            Profiler.BeginSample("TFSharpInferenceComponent.ExecuteGraph");
            Tensor[] inputs  = inputs_it.ToArray();
            Tensor[] outputs = outputs_it.ToArray();

            // TODO: Can/should we pre-allocate that?
            TFSession.Runner runner = m_session.GetRunner();

            inputs.ToList().ForEach((Tensor input) =>
            {
                if (input.Shape.Length == 0)
                {
                    var data = input.Data.GetValue(0);
                    if (input.DataType == typeof(int))
                    {
                        runner.AddInput(m_graph[input.Name][0], (int)data);
                    }
                    else
                    {
                        runner.AddInput(m_graph[input.Name][0], (float)data);
                    }
                }
                else
                {
                    runner.AddInput(m_graph[input.Name][0], input.Data);
                }
            });

            // TODO: better way to pre-allocate this?
            outputs.ToList().ForEach(s => runner.Fetch(s.Name));

            TFStatus status = new TFStatus();

            Profiler.BeginSample("TFSharpInferenceComponent.ExecuteGraph.RunnerRun");
            var out_tensors = runner.Run(status);

            Profiler.EndSample();

            if (!status.Ok)
            {
                Debug.LogError(status.StatusMessage);
                return(-1);
            }

            Debug.Assert(outputs.Length == out_tensors.Length);

            for (var i = 0; i < outputs.Length; ++i)
            {
                if (outputs[i].Shape.Length == 0)
                {
                    // Handle scalars
                    outputs[i].Data = Array.CreateInstance(outputs[i].DataType, new long[1] {
                        1
                    });
                    outputs[i].Data.SetValue(out_tensors[i].GetValue(), 0);
                }
                else
                {
                    outputs[i].Data = out_tensors[i].GetValue() as Array;
                }
            }

            Profiler.EndSample();
            // TODO: create error codes
            return(0);
        }
Example #29
0
        //public List<BoundingBox> NonMaxSuppression(List<BoundingBox> inputBoxes)
        //{
        //    List<BoundingBox> outputBoxes = new List<BoundingBox>();

        //    // array of picked indices
        //    List<int> pick = new List<int>();

        //    // array of each box coordinate
        //    List<float> x1List = inputBoxes.Select(x => x.x1).ToList();
        //    List<float> y1List = inputBoxes.Select(x => x.y1).ToList();
        //    List<float> x2List = inputBoxes.Select(x => x.x2).ToList();
        //    List<float> y2List = inputBoxes.Select(x => x.y2).ToList();

        //    // calculate area array
        //    List<float> area = new List<float>();
        //    foreach (BoundingBox box in inputBoxes)
        //        area.Add((box.x2 - box.x1) * (box.y2 - box.y1));

        //    List<float> idxs = new List<float>();
        //    foreach (float val in y2List) idxs.Add(val);
        //    idxs.Sort();

        //    // keep looping while some indexes still remain in the indexes list
        //    while (idxs.Count > 0)
        //    {

        //    }



        //    return outputBoxes;
        //}


        ///////////////////////////////////////////////////////////////////////////////////////////////////////////
        ///////////////////////////////////////////////////////////////////////////////////////////////////////////
        ///////////////////////////////////////////////////////////////////////////////////////////////////////////
        //
        // Dataflow Pipeline for Display



        public ITargetBlock <Tuple <ImagePackage, WriteableBitmap, WriteableBitmap, bool> > CreateDNNPipeline(
            string modelFile, Dictionary <int, string> classes,
            VideoEditsDatabase editsDB,
            int analysisWidth, int analysisHeight, TFDataType destinationDataType, float minConfidence, TextBlock tb1, TextBlock tb2,
            TaskScheduler uiTask,
            CancellationToken cancelToken)
        {
            // input parameters:
            // analysisWidth = pixel width expected by NN input
            // analysisHeight = pixel height expected by NN input
            // destinationDataType = data type expected by NN input

            TFGraph   l_graph;
            TFSession l_session;
            string    l_modelFile = modelFile;
            Dictionary <int, string> l_classes = classes;

            int               l_analysisWidth       = analysisWidth;
            int               l_analysisHeight      = analysisHeight;
            TFDataType        l_destinationDataType = destinationDataType;
            float             l_minConfidence       = minConfidence;
            CancellationToken l_cancelToken         = cancelToken;
            TaskScheduler     l_uiTask = uiTask;
            TextBlock         l_numDetectionsTextBlock = tb1;
            TextBlock         l_numTrackersTextBlock   = tb2;

            CentroidTracker m_tracker = new CentroidTracker(10);

            VideoEditsDatabase l_editsDB = editsDB;

            MultiTracker m_multiTracker = new MultiTracker();

            m_multiTracker.SetMaxNumFramesWithoutMatch(4);

            NonMaximumSuppression m_nms = new NonMaximumSuppression();

            m_nms.Init();

            l_graph = new TFGraph();
            byte[] l_model = File.ReadAllBytes(l_modelFile);
            l_graph.Import(new TFBuffer(l_model));
            l_session = new TFSession(l_graph);



            //////////////////////////////////////////////////////////////////////
            // PRE-PROCESS
            //
            // Construct graph to preprocess raw image
            // - The model was trained with images scaled to resizeWidth x resizeHeight pixels.
            // - The colors, represented as R, G, B in 1 byte each, were converted to
            //   float using (value - Mean)/Scale.

            const float Mean  = 0; // 117;
            const float Scale = 1;

            TFGraph  l_preprocessGraph = new TFGraph();
            TFOutput l_preprocessInput;
            TFOutput l_preprocessOutput;

            l_preprocessInput = l_preprocessGraph.Placeholder(TFDataType.UInt8);

            //l_preprocessOutput = l_preprocessGraph.Cast(l_preprocessGraph.Div(
            //    x: l_preprocessGraph.Sub(
            //        x: l_preprocessGraph.ResizeBilinear(
            //            images: l_preprocessGraph.ExpandDims(
            //                input: l_preprocessGraph.Cast(l_preprocessInput, DstT: TFDataType.Float),
            //                dim: l_preprocessGraph.Const(0, "make_batch")),
            //            size: l_preprocessGraph.Const(new int[] { l_analysisWidth, l_analysisHeight }, "size")),
            //        y: l_preprocessGraph.Const(Mean, "mean")),
            //    y: l_preprocessGraph.Const(Scale, "scale")), l_destinationDataType);

            //l_preprocessOutput = l_preprocessGraph.Cast(
            //                                l_preprocessGraph.ResizeBilinear(
            //                                        images:l_preprocessGraph.ExpandDims(
            //                                                                    input: l_preprocessInput,
            //                                                                    dim: l_preprocessGraph.Const(0, "make_batch")),
            //                                        size: l_preprocessGraph.Const(new int[] { l_analysisWidth, l_analysisHeight }, "size")),
            //                                l_destinationDataType);

            l_preprocessOutput = l_preprocessGraph.ExpandDims(input: l_preprocessInput, dim: l_preprocessGraph.Const(0, "make_batch"));
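            // Note: with Mean = 0 and Scale = 1 the normalization above would be a
            // no-op, so only the batch dimension is added here.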

            TFSession l_preprocessSession = new TFSession(l_preprocessGraph);

            ////////////////////////////////////////////////////////////////////////////////////////////////
            // POST-PROCESS
            //
            //TFGraph l_postprocessGraph = new TFGraph();
            //TFOutput l_postprocessInput;
            //TFOutput l_postprocessOutput;

            //l_postprocessInput = l_postprocessGraph.Placeholder(TFDataType.Float);

            //l_postprocessOutput = l_postprocessGraph.NonMaxSuppression(boxes: l_postprocessInput, scores: l_postprocessGraph.Const(0, "make_batch"));

            //TFSession l_postprocessSession = new TFSession(l_postprocessGraph);


            ////////////////////////////////////////////////////////////////////////////////////////////////
            // DATAFLOW BLOCKS

            var PreprocessImage = new TransformBlock <Tuple <ImagePackage, WriteableBitmap, WriteableBitmap, bool>,
                                                      Tuple <ImagePackage, WriteableBitmap, WriteableBitmap, bool, TFTensor> >(inputData =>
            {
                // INPUTS:
                //  item 1 - ImagePackage which contains: image data (byte[]), timestamp (double), width (int), height (int), numchannels (int)
                //  item 2 - bitmap used to display image
                //  item 3 - bitmap used to display overlay (where rectangles are drawn)
                //  item 4 - bool flag indicating whether to enable tracking

                // OUTPUT:
                //  tensor holding the preprocessed image
                ImagePackage imagePackage = inputData.Item1;
                byte[] data             = imagePackage.data;
                double timestamp        = imagePackage.timestamp;
                int imageWidth          = imagePackage.width;
                int imageHeight         = imagePackage.height;
                int numChannels         = imagePackage.numChannels;
                WriteableBitmap bitmap  = inputData.Item2;
                WriteableBitmap overlay = inputData.Item3;
                bool useTracker         = inputData.Item4;

                try
                {
                    var rawInputTensor = TFTensor.FromBuffer(new TFShape(imageHeight, imageWidth, numChannels), data, 0,
                                                             imageWidth * imageHeight * numChannels);


                    var preprocessed = l_preprocessSession.Run(
                        inputs: new[] { l_preprocessInput },
                        inputValues: new[] { rawInputTensor },
                        outputs: new[] { l_preprocessOutput });

                    return(Tuple.Create <ImagePackage, WriteableBitmap, WriteableBitmap, bool, TFTensor>(imagePackage, bitmap, overlay, useTracker, preprocessed[0]));
                }
                catch (Exception ex)
                {
                    m_lastErrorMsg = ex.Message;
                    return(null);
                }
            },
            new ExecutionDataflowBlockOptions
            {
                // TaskScheduler = uiTask,
                CancellationToken      = cancelToken,
                MaxDegreeOfParallelism = 1
            });



            var EvaluateImage = new TransformBlock <Tuple <ImagePackage, WriteableBitmap, WriteableBitmap, bool, TFTensor>,
                                                    Tuple <ImagePackage, WriteableBitmap, WriteableBitmap, List <BoundingBox>, int> >(inputData =>
            {
                // INPUTS:
                //  item 1 - ImagePackage which contains: image data (byte[]), timestamp (double), width (int), height (int), numchannels (int)
                //  item 2 - bitmap used to display image
                //  item 3 - bitmap used to display overlay (where rectangles are drawn)
                //  item 4 - bool flag indicating whether to enable tracking
                //  item 5 - Tensorflow Tensor holding the preprocessed image ready for submission to the DNN

                // OUTPUTS:
                //  list of bounding boxes of detected objects
                //  number of active trackers

                ImagePackage imagePackage = inputData.Item1;
                byte[] data             = imagePackage.data;
                double timestamp        = imagePackage.timestamp;
                int imageWidth          = imagePackage.width;
                int imageHeight         = imagePackage.height;
                WriteableBitmap bitmap  = inputData.Item2;
                WriteableBitmap overlay = inputData.Item3;
                bool useTracker         = inputData.Item4;
                TFTensor tensor         = inputData.Item5;


                int numDetections = 0;
                int numTrackers   = 0;

                try
                {
                    var runner = l_session.GetRunner();
                    List <BoundingBox> boxList = new List <BoundingBox>();

                    runner
                    .AddInput(l_graph["image_tensor"][0], tensor)
                    .Fetch(
                        l_graph["detection_boxes"][0],
                        l_graph["detection_scores"][0],
                        l_graph["detection_classes"][0],
                        l_graph["num_detections"][0]);

                    var output = runner.Run();

                    var boxes    = (float[, , ])output[0].GetValue(jagged: false);
                    var scores   = (float[, ])output[1].GetValue(jagged: false);
                    var _classes = (float[, ])output[2].GetValue(jagged: false);
                    var num      = (float[])output[3].GetValue(jagged: false);

                    int numberOfImages = 1;
                    int ndx            = numberOfImages - 1;

                    for (int i = 0; i < num[ndx]; i++)
                    {
                        if (scores[ndx, i] >= l_minConfidence)
                        {
                            int classID = (int)_classes[ndx, i];

                            BoundingBox box = new BoundingBox(boxes[ndx, i, 1], boxes[ndx, i, 0], boxes[ndx, i, 3], boxes[ndx, i, 2],
                                                              classID, 0, scores[ndx, i]);
                            boxList.Add(box);
                        }
                    }

                    // perform NMS
                    boxList = m_nms.Execute(boxList, 0.50f);

                    // add boxes to edits database
                    int ii = 0;
                    if (boxList.Count > 0)
                    {
                        ii++;
                    }
                    l_editsDB.AddRedactionBoxesFromDNN(boxList, timestamp, imageWidth, imageHeight);

                    numDetections = boxList.Count;

                    if (useTracker)
                    {
                        List <BoundingBox> trackedBoxes = m_multiTracker.Update(data, imageWidth, imageHeight, boxList);

                        numTrackers = trackedBoxes.Count;

                        boxList.AddRange(trackedBoxes);

                        boxList = m_nms.Execute(boxList, 0.50f);
                    }
                    else
                    {
                        m_multiTracker.ClearTrackers();
                    }

                    return(Tuple.Create <ImagePackage, WriteableBitmap, WriteableBitmap, List <BoundingBox>, int>(imagePackage, bitmap, overlay, boxList, numTrackers));
                }
                catch (Exception ex)
                {
                    m_lastErrorMsg = ex.Message;
                    return(null);
                }
            },
            new ExecutionDataflowBlockOptions
            {
                // TaskScheduler = uiTask,
                CancellationToken      = cancelToken,
                MaxDegreeOfParallelism = 1
            });



            var PlotResults = new ActionBlock <Tuple <ImagePackage, WriteableBitmap, WriteableBitmap, List <BoundingBox>, int> >(inputData =>
            {
                ImagePackage imagePackage = inputData.Item1;
                byte[] data              = imagePackage.data;
                double timestamp         = imagePackage.timestamp;
                int imageWidth           = imagePackage.width;
                int imageHeight          = imagePackage.height;
                int numChannels          = imagePackage.numChannels;
                WriteableBitmap bitmap   = inputData.Item2;
                WriteableBitmap overlay  = inputData.Item3;
                List <BoundingBox> boxes = inputData.Item4;
                int numDetections        = boxes.Count;
                int numTrackers          = inputData.Item5;

                try
                {
                    System.Windows.Media.PixelFormat pixelFormat = bitmap.Format;
                    int bpp = pixelFormat.BitsPerPixel;

                    if (bpp > 24) // handle BGRA 32-bit images (4 bytes per pixel)
                    {
                        byte[] data1 = new byte[bitmap.PixelWidth * bitmap.PixelHeight * 4];
                        for (int r = 0; r < bitmap.PixelHeight; r++)
                        {
                            for (int c = 0; c < bitmap.PixelWidth; c++)
                            {
                                int ndx  = (r * bitmap.PixelWidth * 3) + (c * 3);
                                int ndx1 = (r * bitmap.PixelWidth * 4) + (c * 4);

                                data1[ndx1 + 0] = data[ndx + 0];
                                data1[ndx1 + 1] = data[ndx + 1];
                                data1[ndx1 + 2] = data[ndx + 2];
                                data1[ndx1 + 3] = 255;
                            }
                        }

                        Int32Rect rect = new Int32Rect(0, 0, bitmap.PixelWidth, bitmap.PixelHeight);
                        bitmap.Lock();
                        bitmap.WritePixels(rect, data1, bitmap.PixelWidth * 4, 0);
                        bitmap.Unlock();
                    }
                    else
                    {
                        // handle BGR 24-bit images
                        Int32Rect rect = new Int32Rect(0, 0, bitmap.PixelWidth, bitmap.PixelHeight);
                        bitmap.Lock();
                        bitmap.WritePixels(rect, data, bitmap.PixelWidth * 3, 0);
                        bitmap.Unlock();
                    }

                    overlay.Clear();
                    foreach (BoundingBox box in boxes)
                    {
                        int x1 = (int)(box.x1 * bitmap.PixelWidth);
                        int y1 = (int)(box.y1 * bitmap.PixelHeight);
                        int x2 = (int)(box.x2 * bitmap.PixelWidth);
                        int y2 = (int)(box.y2 * bitmap.PixelHeight);
                        overlay.DrawRectangle(x1, y1, x2, y2, Colors.Red);
                        overlay.DrawRectangle(x1 + 1, y1 + 1, x2 - 1, y2 - 1, Colors.Red);
                        overlay.DrawRectangle(x1 + 2, y1 + 2, x2 - 2, y2 - 2, Colors.Red);
                        //overlay.FillRectangle(x1, y1, x2, y2, Colors.Red);
                    }

                    if (l_numDetectionsTextBlock != null)
                    {
                        l_numDetectionsTextBlock.Text = numDetections.ToString();
                    }
                    if (l_numTrackersTextBlock != null)
                    {
                        l_numTrackersTextBlock.Text = numTrackers.ToString();
                    }
                }
                catch (Exception ex)
                {
                    m_lastErrorMsg = ex.Message;
                    return;
                }
            },
            new ExecutionDataflowBlockOptions
            {
                TaskScheduler          = l_uiTask,
                CancellationToken      = cancelToken,
                MaxDegreeOfParallelism = 1
            });


            PreprocessImage.LinkTo(EvaluateImage);
            EvaluateImage.LinkTo(PlotResults);
            return(PreprocessImage);
        }
Example #30
0
    // run the CNN
    void evaluate()
    {
        // only run CNN if we have enough accelerometer values
        if (accelX.Count == inputWidth)
        {
            // convert from list to tensor
            // if the accelerometer list is one sample short, zero-fill the last slot
            int i;
            for (i = 0; i < accelX.Count; i++)
            {
                inputTensor[0, 0, i, 0] = accelX[i];
                test = inputTensor[0, 0, i, 0];
            }
            if (i != inputWidth)
            {
                inputTensor[0, 0, inputWidth - 1, 0] = 0;
            }

            for (i = 0; i < accelY.Count; i++)
            {
                inputTensor[0, 0, i, 1] = accelY[i];
            }
            if (i != inputWidth)
            {
                inputTensor[0, 0, inputWidth - 1, 1] = 0;
            }

            for (i = 0; i < accelZ.Count; i++)
            {
                inputTensor[0, 0, i, 2] = accelZ[i];
            }
            if (i != inputWidth)
            {
                inputTensor[0, 0, inputWidth - 1, 2] = 0;
            }

            // tensor output variable
            float[,] recurrentTensor;

            // create tensorflow model
            using (var graph = new TFGraph())
            {
                graph.Import(graphModel.bytes);
                var session = new TFSession(graph);
                var runner  = session.GetRunner();

                // hand the multi-dimensional input array to TensorFlowSharp (implicit conversion to TFTensor)
                TFTensor input = inputTensor;


                // set up input tensor and input
                runner.AddInput(graph["input_placeholder_x"][0], input);

                // set up output tensor
                runner.Fetch(graph["output_node"][0]);

                // run model
                recurrentTensor = runner.Run()[0].GetValue() as float[, ];
                here            = true;

                // dispose resources - keeps cnn from breaking down later
                session.Dispose();
                graph.Dispose();
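                // (graph is also disposed by the enclosing using block, so this call is redundant)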
            }

            // find the most confident answer
            float highVal = 0;
            int   highInd = -1;
            sum = 0f;

            // *MAKE SURE ACTIVITYINDEXCHOICES MATCHES THE NUMBER OF CHOICES*
            for (int j = 0; j < activityIndexChoices; j++)
            {
                confidence = recurrentTensor[0, j];
                if (highInd > -1)
                {
                    if (recurrentTensor[0, j] > highVal)
                    {
                        highVal = confidence;
                        highInd = j;
                    }
                }
                else
                {
                    highVal = confidence;
                    highInd = j;
                }

                // debugging - sum should = 1 at the end
                sum += confidence;
            }

            // debugging
            test1 = recurrentTensor[0, 0];
            test2 = recurrentTensor[0, 1];

            // used in movement to see if we should be moving
            index = highInd;
            countCNN++;
        }
    }