Example #1
        const float Scale = 128f; // --input_std; should be 128 for MobileNet. Do not use 1, or the results will be wrong.

        public Detection()
        {
            //Load labels
            labels = File.ReadAllLines("tfdata/output_labels.txt");

            // Construct an in-memory graph from the serialized form.
            graph = new TFGraph();
            // Load the serialized GraphDef from a file.
            byte[] model = File.ReadAllBytes("tfdata/output_graph.pb");
            graph.Import(model, "");


            TFSessionOptions TFOptions = new TFSessionOptions();

            // This configuration helps when using the GPU build of the TensorFlow library on Windows, so the session does not grab all available GPU memory.
            unsafe
            {
                // These bytes represent the following settings:
                // config.gpu_options.allow_growth = True
                // config.gpu_options.per_process_gpu_memory_fraction = 0.3
                byte[] GPUConfig = new byte[] { 0x32, 0x0b, 0x09, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0xd3, 0x3f, 0x20, 0x01 };

                fixed(void *ptr = &GPUConfig[0])
                {
                    TFOptions.SetConfig(new IntPtr(ptr), GPUConfig.Length);
                }
            }

            // Set the session
            session = new TFSession(graph, TFOptions);
        }
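The opaque GPUConfig byte array above is a serialized tensorflow.ConfigProto protobuf message. Below is a minimal sketch, not part of the original example, of how the same bytes can be produced by hand-encoding the protobuf wire format, assuming the standard field numbers from config.proto (ConfigProto.gpu_options = 6, GPUOptions.per_process_gpu_memory_fraction = 1, GPUOptions.allow_growth = 4) and a little-endian platform.

using System;
using System.Collections.Generic;

static class GpuConfigBytes
{
    // Hand-encodes ConfigProto { gpu_options { per_process_gpu_memory_fraction, allow_growth } }.
    // Assumes the field numbers listed above; protobuf fixed64 values are little-endian.
    public static byte[] Build(double? memoryFraction, bool allowGrowth)
    {
        var gpuOptions = new List<byte>();

        if (memoryFraction.HasValue)
        {
            // per_process_gpu_memory_fraction: field 1, wire type 1 (64-bit double) -> tag 0x09
            gpuOptions.Add(0x09);
            gpuOptions.AddRange(BitConverter.GetBytes(memoryFraction.Value));
        }

        if (allowGrowth)
        {
            // allow_growth: field 4, wire type 0 (varint) -> tag 0x20
            gpuOptions.Add(0x20);
            gpuOptions.Add(0x01);
        }

        // gpu_options: field 6, wire type 2 (length-delimited submessage) -> tag 0x32
        var config = new List<byte> { 0x32, (byte)gpuOptions.Count };
        config.AddRange(gpuOptions);
        return config.ToArray();
    }
}

Build(0.3, true) reproduces the thirteen bytes hard-coded in the constructor above, and Build(null, true) yields { 0x32, 0x02, 0x20, 0x01 }, the shorter allow_growth-only variant used in later examples.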
Example #2
        public static TFSessionOptions CreateOptions()
        {
            TFSessionOptions Options = new TFSessionOptions();

            // Each commented-out entry below pins the session to one GPU (visible_device_list = "0" .. "15") with allow_growth = true.
            //byte[][] Serialized = new byte[][]
            //{
            //    new byte[] { 0x32, 0x5, 0x20, 0x1, 0x2a, 0x1, 0x30 },
            //    new byte[] { 0x32, 0x5, 0x20, 0x1, 0x2a, 0x1, 0x31 },
            //    new byte[] { 0x32, 0x5, 0x20, 0x1, 0x2a, 0x1, 0x32 },
            //    new byte[] { 0x32, 0x5, 0x20, 0x1, 0x2a, 0x1, 0x33 },
            //    new byte[] { 0x32, 0x5, 0x20, 0x1, 0x2a, 0x1, 0x34 },
            //    new byte[] { 0x32, 0x5, 0x20, 0x1, 0x2a, 0x1, 0x35 },
            //    new byte[] { 0x32, 0x5, 0x20, 0x1, 0x2a, 0x1, 0x36 },
            //    new byte[] { 0x32, 0x5, 0x20, 0x1, 0x2a, 0x1, 0x37 },
            //    new byte[] { 0x32, 0x5, 0x20, 0x1, 0x2a, 0x1, 0x38 },
            //    new byte[] { 0x32, 0x5, 0x20, 0x1, 0x2a, 0x1, 0x39 },
            //    new byte[] { 0x32, 0x6, 0x20, 0x1, 0x2a, 0x2, 0x31, 0x30 },
            //    new byte[] { 0x32, 0x6, 0x20, 0x1, 0x2a, 0x2, 0x31, 0x31 },
            //    new byte[] { 0x32, 0x6, 0x20, 0x1, 0x2a, 0x2, 0x31, 0x32 },
            //    new byte[] { 0x32, 0x6, 0x20, 0x1, 0x2a, 0x2, 0x31, 0x33 },
            //    new byte[] { 0x32, 0x6, 0x20, 0x1, 0x2a, 0x2, 0x31, 0x34 },
            //    new byte[] { 0x32, 0x6, 0x20, 0x1, 0x2a, 0x2, 0x31, 0x35 }
            //};
            byte[] Serialized = { 0x32, 0x2, 0x20, 0x1, 0x38, 0x1 }; // gpu_options.allow_growth = true; allow_soft_placement = true

            TFStatus Stat = new TFStatus();

            unsafe
            {
                fixed(byte *SerializedPtr = Serialized)
                Options.SetConfig(new IntPtr(SerializedPtr), Serialized.Length, Stat);
            }

            return(Options);
        }
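The commented-out arrays above follow the same ConfigProto encoding: each one enables allow_growth and restricts the session to a single GPU through visible_device_list. A hypothetical helper in the same spirit (the visible_device_list field number 5 is taken from config.proto; the helper itself is not part of the original code):

using System.Collections.Generic;
using System.Text;

static class VisibleDeviceConfig
{
    // Encodes ConfigProto { gpu_options { allow_growth: true, visible_device_list: "<index>" } }.
    public static byte[] Build(int gpuIndex)
    {
        byte[] device = Encoding.ASCII.GetBytes(gpuIndex.ToString());

        var gpuOptions = new List<byte> { 0x20, 0x01 };               // allow_growth = true (field 4, varint)
        gpuOptions.Add(0x2A);                                          // visible_device_list (field 5, length-delimited)
        gpuOptions.Add((byte)device.Length);
        gpuOptions.AddRange(device);

        var config = new List<byte> { 0x32, (byte)gpuOptions.Count };  // gpu_options (field 6, length-delimited)
        config.AddRange(gpuOptions);
        return config.ToArray();
    }
}

Build(0) reproduces the first commented-out array, { 0x32, 0x5, 0x20, 0x1, 0x2a, 0x1, 0x30 }, and Build(10) the two-character form { 0x32, 0x6, 0x20, 0x1, 0x2a, 0x2, 0x31, 0x30 }.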
Example #3
        /// <summary>
        /// ctor.
        /// </summary>
        public MultiPerson2DPoseEstimationWindow()
        {
            InitializeComponent();

            xRate = resolutionX / (float)detectionSize;
            yRate = resolutionY / (float)detectionSize;

            TFSessionOptions TFOptions = new TFSessionOptions();

            unsafe
            {
                //This will only work if you install the tensorflow-batteries-windows-x64-gpu NuGet package.
                //WARNING: It is only compatible with NVIDIA video cards that support CUDA.
                //         You may need to copy cudnn64_7.dll from cudnn.zip into the execution folder and install cuDNN properly; see the link below:
                //         https://docs.nvidia.com/deeplearning/sdk/cudnn-install/index.html
                //
                //https://github.com/migueldeicaza/TensorFlowSharp/issues/206
                byte[] GPUConfig = new byte[] { 0x32, 0x0b, 0x09, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0xd3, 0x3f, 0x20, 0x01 };

                fixed(void *ptr = &GPUConfig[0])
                {
                    TFOptions.SetConfig(new IntPtr(ptr), GPUConfig.Length);
                }
            }

            graph = new TFGraph();
            graph.Import(File.ReadAllBytes(modelPath));
            session = new TFSession(graph);
        }
Example #4
        private static void Run(Options o)
        {
            using var graph = new TFGraph();
            graph.Import(File.ReadAllBytes(o.Model));

            TFSessionOptions options = new TFSessionOptions();

            if (o.ForceGPU)
            {
                unsafe
                {
                    byte[] GPUConfig = new byte[] { 0x32, 0x02, 0x20, 0x01 };

                    fixed(void *ptr = &GPUConfig[0])
                    options.SetConfig(new IntPtr(ptr), GPUConfig.Length);
                }
            }

            using (var session = new TFSession(graph, options))
            {
                var runner = session.GetRunner();

                if (o.Verbose)
                {
                    foreach (var e in graph.GetEnumerator())
                    {
                        Console.WriteLine($"OT: {e.OpType} {e.Name}");
                    }
                }

                if (o.Input.EndsWith(".png"))
                {
                    using (var image = Image.Load(o.Input))
                    {
                        var iph = graph[o.InputPlaceholderName];

                        var slice = new float[1, image.Width, image.Height, 1];
                        WriteImageToSlice(image, slice);
                        runner.AddInput($"{o.InputPlaceholderName}:0", slice);
                        runner.Fetch($"{o.FetchFrom}:0");

                        var output = runner.Run();
                        var result = output[0];
                        var r_v    = (float[][])result.GetValue(true);

                        if (o.Output.EndsWith(".txt"))
                        {
                            using (var f = new StreamWriter(o.Output, false))
                                foreach (var v in r_v[0])
                                {
                                    f.WriteLine(v.ToString(CultureInfo.InvariantCulture));
                                }
                        }
                    }
                }
            }
        }
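Run depends on a WriteImageToSlice helper that is not shown in this listing. A minimal sketch of what it might look like, assuming an ImageSharp version where Image.Load returns Image<Rgba32> (namespaces SixLabors.ImageSharp and SixLabors.ImageSharp.PixelFormats), a grayscale model input, and pixel values normalized to [0, 1] to fill the float[1, width, height, 1] slice declared above; the real helper may normalize differently:

        // Hypothetical implementation; assumed to live in the same class as Run above.
        private static void WriteImageToSlice(Image<Rgba32> image, float[,,,] slice)
        {
            for (int x = 0; x < image.Width; x++)
            {
                for (int y = 0; y < image.Height; y++)
                {
                    Rgba32 pixel = image[x, y];
                    // Average the RGB channels and scale to [0, 1]; the dimension order
                    // matches the [1, width, height, 1] slice allocated by the caller.
                    slice[0, x, y, 0] = (pixel.R + pixel.G + pixel.B) / (3f * 255f);
                }
            }
        }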
Example #5
        public TensorProcessor(byte[] model, string device = "/CPU:0")
        {
            _graph = new TFGraph();
            var options = new TFImportGraphDefOptionsExt();

            //options.SetDefaultDevice(device);
            _graph.Import(model, options);

            TFSessionOptions TFOptions = new TFSessionOptions();

            unsafe
            {
                byte[] GPUConfig = { 0x38, 0x1 }; // allow_soft_placement = true

                fixed(void *ptr = &GPUConfig[0])
                {
                    TFOptions.SetConfig(new IntPtr(ptr), GPUConfig.Length);
                }
            }
            _session = new TFSession(_graph, TFOptions);

            Console.WriteLine($"=> Session for {device} created with: {String.Join(',', _session.ListDevices().Select(x => x.Name).ToList())}");
        }
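A hypothetical usage of the constructor above (System.IO assumed imported, the model path is a placeholder); note that with options.SetDefaultDevice(device) commented out, the device argument only appears in the log message:

byte[] modelBytes = File.ReadAllBytes("models/frozen_graph.pb"); // placeholder path
var cpuProcessor = new TensorProcessor(modelBytes);              // default device string "/CPU:0"
var gpuProcessor = new TensorProcessor(modelBytes, "/GPU:0");    // needs a CUDA-capable TensorFlow build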
Example #6
        public override IObservable <Pose> Process(IObservable <IplImage> source)
        {
            return(Observable.Defer(() =>
            {
                TFSessionOptions options = new TFSessionOptions();
                unsafe
                {
                    byte[] GPUConfig = new byte[] { 0x32, 0x02, 0x20, 0x01 };
                    fixed(void *ptr = &GPUConfig[0])
                    {
                        options.SetConfig(new IntPtr(ptr), GPUConfig.Length);
                    }
                }

                var graph = new TFGraph();
                var session = new TFSession(graph, options, null);
                var bytes = File.ReadAllBytes(ModelFileName);
                graph.Import(bytes);

                IplImage temp = null;
                TFTensor tensor = null;
                TFSession.Runner runner = null;
                var config = ConfigHelper.PoseConfig(PoseConfigFileName);
                return source.Select(input =>
                {
                    var poseScale = 1.0;
                    const int TensorChannels = 3;
                    var frameSize = input.Size;
                    var scaleFactor = ScaleFactor;
                    if (scaleFactor.HasValue)
                    {
                        poseScale = scaleFactor.Value;
                        frameSize.Width = (int)(frameSize.Width * poseScale);
                        frameSize.Height = (int)(frameSize.Height * poseScale);
                        poseScale = 1.0 / poseScale;
                    }

                    if (tensor == null || tensor.GetTensorDimension(1) != frameSize.Height || tensor.GetTensorDimension(2) != frameSize.Width)
                    {
                        tensor = new TFTensor(
                            TFDataType.Float,
                            new long[] { 1, frameSize.Height, frameSize.Width, TensorChannels },
                            frameSize.Width * frameSize.Height * TensorChannels * sizeof(float));
                        runner = session.GetRunner();
                        runner.AddInput(graph["Placeholder"][0], tensor);
                        runner.Fetch(graph["concat_1"][0]);
                    }

                    var frame = input;
                    if (frameSize != input.Size)
                    {
                        if (temp == null || temp.Size != frameSize)
                        {
                            temp = new IplImage(frameSize, input.Depth, input.Channels);
                        }

                        CV.Resize(input, temp);
                        frame = temp;
                    }

                    using (var image = new IplImage(frameSize, IplDepth.F32, TensorChannels, tensor.Data))
                    {
                        CV.Convert(frame, image);
                    }

                    // Run the model
                    var output = runner.Run();

                    // Fetch the results from output:
                    var poseTensor = output[0];
                    var pose = new Mat((int)poseTensor.Shape[0], (int)poseTensor.Shape[1], Depth.F32, 1, poseTensor.Data);
                    var result = new Pose(input);
                    var threshold = MinConfidence;
                    for (int i = 0; i < pose.Rows; i++)
                    {
                        BodyPart bodyPart;
                        bodyPart.Name = config[i];
                        bodyPart.Confidence = (float)pose.GetReal(i, 2);
                        if (bodyPart.Confidence < threshold)
                        {
                            bodyPart.Position = new Point2f(float.NaN, float.NaN);
                        }
                        else
                        {
                            bodyPart.Position.X = (float)(pose.GetReal(i, 1) * poseScale);
                            bodyPart.Position.Y = (float)(pose.GetReal(i, 0) * poseScale);
                        }
                        result.Add(bodyPart);
                    }
                    return result;
                });
            }));
        }
Example #7
        public static void Main(string [] args)
        {
            TFSessionOptions options = new TFSessionOptions();

            unsafe
            {
                //byte[] PUConfig = new byte[] { 0x32, 0x05, 0x20, 0x01, 0x2a, 0x01, 0x30, 0x38, 0x01 }; // GPU: gpu_options { allow_growth = true, visible_device_list = "0" }, allow_soft_placement = true
                byte[] PUConfig = new byte[] { 0x0a, 0x07, 0x0a, 0x03, 0x67, 0x70, 0x75, 0x10, 0x00 }; // CPU only: device_count { "gpu": 0 }
                fixed(void *ptr = &PUConfig[0])
                {
                    options.SetConfig(new IntPtr(ptr), PUConfig.Length);
                }
            }
            TFSession session;
            var       graph = new TFGraph();

            using (TFSession sess = new TFSession(graph, options))
                using (var metaGraphUnused = new TFBuffer())
                {
                    session = sess.FromSavedModel(options, null, "tzb", new[] { "serve" }, graph, metaGraphUnused);
                    IEnumerable <TensorFlow.DeviceAttributes> iem = session.ListDevices();
                    foreach (object obj in iem)
                    {
                        Console.WriteLine(((DeviceAttributes)obj).Name);
                    }
                    var labels = File.ReadAllLines("tzb/label.txt");
                    // Print the operation names in the graph

                    /*IEnumerable<TensorFlow.TFOperation> iem = graph.GetEnumerator();
                     * foreach (object obj in iem)
                     * {
                     *  Console.WriteLine(((TFOperation)obj).Name);
                     * }*/
                    //while(true)
                    float[] eimg = new float[224 * 224];
                    for (int i = 0; i < 224 * 224; i++)
                    {
                        eimg[i] = 0;
                    }
                    var tfs = new TFShape(1, 224, 224, 1); // assumed shape of the "images" placeholder (batch, height, width, channels)
                    TFTensor ten = TFTensor.FromBuffer(tfs, eimg, 0, 224 * 224 * 1);
                    for (int j = 0; j < 3; j++)
                    {
                        var runner = session.GetRunner();
                        runner.AddInput(graph["images"][0], ten).Fetch(graph["classes"].Name);
                        var output = runner.Run();
                    }
                    ten.Dispose();
                    string[] files = Directory.GetFiles("tzb/images/defect", "*.*");
                    //while(true)
                    foreach (string file in files)
                    {
                        DateTime bft = DateTime.Now;
                        //var tensor = Image2Tensor(file);
                        //break;
                        var tensor = ImageUtil.CreateTensorFromImageFile(file);
                        //TFTensor tensor = TFTensor.FromBuffer(tfs, eimg, 0, 224 * 224);
                        var runner = session.GetRunner();
                        runner.AddInput(graph["images"][0], tensor).Fetch(graph["classes"].Name);
                        var      output = runner.Run();
                        DateTime aft    = DateTime.Now;
                        TimeSpan ts     = aft.Subtract(bft);
                        System.Threading.Thread.Sleep(50);
                        var result = output[0];
                        int class_ = ((int[])result.GetValue(jagged: true))[0];
                        Console.WriteLine(file + " best_match: " + class_ + " " + labels[class_] + " time: " + ts.TotalMilliseconds);
                    }
                }
        }
Example #8
        public override IObservable <Pose> Process(IObservable <IplImage> source)
        {
            return(Observable.Defer(() =>
            {
                TFSessionOptions options = new TFSessionOptions();
                unsafe
                {
                    byte[] GPUConfig = new byte[] { 0x32, 0x02, 0x20, 0x01 };
                    fixed(void *ptr = &GPUConfig[0])
                    {
                        options.SetConfig(new IntPtr(ptr), GPUConfig.Length);
                    }
                }

                var graph = new TFGraph();
                var session = new TFSession(graph, options, null);
                var bytes = File.ReadAllBytes(ModelFileName);
                graph.Import(bytes);

                TFTensor tensor = null;
                var config = ConfigHelper.PoseConfig(PoseConfigFileName);
                return source.Select(input =>
                {
                    if (tensor == null || tensor.GetTensorDimension(1) != input.Height || tensor.GetTensorDimension(2) != input.Width)
                    {
                        tensor = new TFTensor(
                            TFDataType.Float,
                            new long[] { 1, input.Height, input.Width, 3 },
                            input.WidthStep * input.Height * 4);
                    }

                    using (var image = new IplImage(input.Size, IplDepth.F32, 3, tensor.Data))
                    {
                        CV.Convert(input, image);
                    }

                    var runner = session.GetRunner();
                    runner.AddInput(graph["Placeholder"][0], tensor);
                    runner.Fetch(graph["concat_1"][0]);

                    // Run the model
                    var output = runner.Run();

                    // Fetch the results from output:
                    var poseTensor = output[0];
                    var pose = new Mat((int)poseTensor.Shape[0], (int)poseTensor.Shape[1], Depth.F32, 1, poseTensor.Data);
                    var result = new Pose(input);
                    var threshold = MinConfidence;
                    for (int i = 0; i < pose.Rows; i++)
                    {
                        BodyPart bodyPart;
                        bodyPart.Name = config[i];
                        bodyPart.Confidence = (float)pose.GetReal(i, 2);
                        if (bodyPart.Confidence < threshold)
                        {
                            bodyPart.Position = new Point2f(float.NaN, float.NaN);
                        }
                        else
                        {
                            bodyPart.Position.X = (float)pose.GetReal(i, 1);
                            bodyPart.Position.Y = (float)pose.GetReal(i, 0);
                        }
                        result.Add(bodyPart);
                    }
                    return result;
                });
            }));
        }
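A hypothetical sketch of driving the operator above directly from code. The class name (DetectPose), file paths, and frame source are assumptions, not part of the original code; in practice this kind of reactive operator is usually composed inside a larger workflow rather than called by hand. ModelFileName, PoseConfigFileName, and MinConfidence are the properties referenced in the method above.

// Assumed class name, file paths, and frame source.
var poseEstimator = new DetectPose
{
    ModelFileName = "models/pose_graph.pb",
    PoseConfigFileName = "models/pose_cfg.yaml",
    MinConfidence = 0.5f
};

IObservable<IplImage> frames = GrabFrames();             // e.g. frames published by a camera capture source
IObservable<Pose> poses = poseEstimator.Process(frames);
using var subscription = poses.Subscribe(pose => Console.WriteLine("Pose received"));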