/// <summary>
/// Form-load handler: downloads the default model and label map, loads the frozen
/// graph into a new TF session, runs detection once on the demo image, and enables
/// the "load picture" menu item on success. Any failure is reported in a message
/// box and the status bar.
/// async void is acceptable here only because this is a top-level event handler.
/// </summary>
private async void ODMainForm_Load(object sender, EventArgs e)
{
    try
    {
        toolStripStatusLabel.Text = "Start to download model and labels";
        await DownloadDefaultModelAsync(Program.ModelPath);
        await DownloadDefaultTexts(Program.LabelsPath);

        toolStripStatusLabel.Text = "Start to load model.";
        _catalog = CatalogUtil.ReadCatalogItems(Program.LabelsPath);
        _graph = new TFGraph();
        var model = File.ReadAllBytes(Program.ModelPath);
        _graph.Import(new TFBuffer(model));
        _session = new TFSession(_graph);

        toolStripStatusLabel.Text = "Initialization done.";
        await DrawBoxesAsync(Program.DemoImagePath);
        // Only let the user open pictures once the session is fully initialized.
        loadPictureToolStripMenuItem.Enabled = true;
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.ToString(), "Error during downloading model and labels", MessageBoxButtons.OK, MessageBoxIcon.Error);
        toolStripStatusLabel.Text = "Error during downloading model and labels";
    }
}
// Use this for initialization.
// Unity coroutine: loads the label catalog and frozen TF graph, configures the
// GUI style used to draw labels/boxes, then starts detection work on a
// background thread.
IEnumerator Start()
{
#if UNITY_ANDROID
    // Native TF bindings must be initialized before any TensorFlow call on Android.
    TensorFlowSharp.Android.NativeBinding.Init();
#endif
    // Pixel buffer for one INPUT_SIZE x INPUT_SIZE frame, 3 values per pixel.
    // NOTE(review): this variant uses float[]; a sibling Start() uses byte[] — confirm
    // which element type ThreadedWork expects.
    pixels = new float[INPUT_SIZE * INPUT_SIZE * 3];
    _catalog = CatalogUtil.ReadCatalogItems(labelMap.text);

    Debug.Log("Loading graph...");
    graph = new TFGraph();
    graph.Import(model.bytes);
    session = new TFSession(graph);
    Debug.Log("Graph Loaded!!!");
    /*foreach(var i in graph.GetEnumerator())
     * {
     *     Debug.Log(i.Name);
     * }*/

    // Set style of labels and boxes.
    style.fontSize = 30;
    style.contentOffset = new Vector2(0, 50);
    style.normal.textColor = objectColor;

    // Begin our heavy work on a new thread.
    _thread = new Thread(ThreadedWork);
    _thread.Start();

    // Do this to avoid warnings: toggle processingImage across exactly one frame.
    processingImage = true;
    yield return(new WaitForEndOfFrame());
    processingImage = false;
}
// Use this for initialization.
// Unity coroutine: loads the label catalog and frozen TF graph, configures the
// GUI style used to draw labels/boxes (background texture, alignment, bold 80pt
// font), then starts detection work on a background thread.
IEnumerator Start()
{
#if UNITY_ANDROID
    // Native TF bindings must be initialized before any TensorFlow call on Android.
    TensorFlowSharp.Android.NativeBinding.Init();
#endif
    // Pixel buffer for one INPUT_SIZE x INPUT_SIZE frame, 3 bytes per pixel (RGB).
    pixels = new byte[INPUT_SIZE * INPUT_SIZE * 3];
    _catalog = CatalogUtil.ReadCatalogItems(labelMap.text);

    Debug.Log("Loading graph...");
    graph = new TFGraph();
    graph.Import(model.bytes);
    session = new TFSession(graph);
    Debug.Log("Graph Loaded!!!");

    // Set style of labels and boxes.
    style.normal.background = tex;
    style.alignment = TextAnchor.UpperCenter;
    style.fontSize = 80;
    style.fontStyle = FontStyle.Bold;
    style.contentOffset = new Vector2(0, 50);
    style.normal.textColor = objectColor;

    // Begin our heavy work on a new thread.
    _thread = new Thread(ThreadedWork);
    _thread.Start();

    // Do this to avoid warnings: toggle processingImage across exactly one frame.
    processingImage = true;
    yield return(new WaitForEndOfFrame());
    processingImage = false;
}
/// <summary>
/// Builds a recognizer from a serialized TensorFlow model and a label catalog.
/// </summary>
/// <param name="model">Raw bytes of a frozen TensorFlow graph (*.pb).</param>
/// <param name="labels">Label-map text whose items carry an Id used as the catalog key.</param>
public ObjectRecognizer(byte[] model, string labels)
{
#if UNITY_ANDROID
    // Native TF bindings must be initialized before any TensorFlow call on Android.
    TensorFlowSharp.Android.NativeBinding.Init();
#endif
    // Import the frozen graph, and index the catalog by item id for direct lookups.
    _graph = new TFGraph();
    var buffer = new TFBuffer(model);
    _graph.Import(buffer);

    var items = CatalogUtil.ReadCatalogItems(labels);
    _catalog = items.ToDictionary(item => item.Id);
}
/// <summary>
/// Runs object detection over the configured input image (_input) and writes an
/// annotated copy to the configured output path (_output). Downloads default
/// label/model files when no explicit paths were supplied.
/// </summary>
private void doDetect()
{
    //options.Parse(args);
    // Fall back to downloaded defaults when paths were not configured.
    if (_catalogPath == null) {
        _catalogPath = DownloadDefaultTexts(_currentDir);
    }
    if (_modelPath == null) {
        _modelPath = DownloadDefaultModel(_currentDir);
    }
    _catalog = CatalogUtil.ReadCatalogItems(_catalogPath);
    var fileTuples = new List <(string input, string output)>() { (_input, _output) };
    string modelFile = _modelPath;

    using (var graph = new TFGraph())
    {
        // Import the frozen inference graph (*.pb) into a fresh graph/session.
        var model = File.ReadAllBytes(modelFile);
        graph.Import(new TFBuffer(model));
        using (var session = new TFSession(graph))
        {
            Console.WriteLine("Detecting objects");
            foreach (var tuple in fileTuples)
            {
                var tensor = ImageUtil.CreateTensorFromImageFile(tuple.input, TFDataType.UInt8);
                var runner = session.GetRunner();
                // Tensor names follow the TF object-detection API convention.
                runner
                    .AddInput(graph["image_tensor"][0], tensor)
                    .Fetch(
                        graph["detection_boxes"][0],
                        graph["detection_scores"][0],
                        graph["detection_classes"][0],
                        graph["num_detections"][0]);
                var output = runner.Run();

                // Fetched in the same order as above: boxes, scores, classes, count.
                var boxes = (float[, , ])output[0].GetValue(jagged: false);
                var scores = (float[, ])output[1].GetValue(jagged: false);
                var classes = (float[, ])output[2].GetValue(jagged: false);
                var num = (float[])output[3].GetValue(jagged: false);
                DrawBoxes(boxes, scores, classes, tuple.input, tuple.output, MIN_SCORE_FOR_OBJECT_HIGHLIGHTING);
                Console.WriteLine($"Done. See {_output_relative}");
            }
        }
    }
}
/// <summary>
/// Draws a labeled rectangle on the input image for every detection whose score
/// meets <paramref name="minScore"/>, and saves the result to <paramref name="outputFile"/>.
/// </summary>
/// <param name="boxes">Per-detection coordinates; the last axis is [ymin, xmin, ymax, xmax].</param>
/// <param name="scores">Per-detection confidence scores.</param>
/// <param name="classes">Per-detection class ids (as floats).</param>
/// <param name="inputFile">Path of the image to annotate.</param>
/// <param name="outputFile">Path where the annotated image is written.</param>
/// <param name="minScore">Detections scoring below this threshold are skipped.</param>
void DrawBoxes(float[,,] boxes, float[,] scores, float[,] classes, string inputFile, string outputFile, double minScore)
{
    // NOTE(review): re-reads the catalog from a hard-coded path on every call and
    // overwrites _catalog — consider loading it once at startup instead.
    _catalog = CatalogUtil.ReadCatalogItems("object-detection.pbtxt");

    var x = boxes.GetLength(0);
    var y = boxes.GetLength(1);

    using (var editor = new ImageEditor(inputFile, outputFile))
    {
        for (int i = 0; i < x; i++)
        {
            for (int j = 0; j < y; j++)
            {
                // Skip detections below the confidence threshold.
                if (scores[i, j] < minScore)
                {
                    continue;
                }

                // Box coordinate layout per detection: [ymin, xmin, ymax, xmax].
                float ymin = boxes[i, j, 0];
                float xmin = boxes[i, j, 1];
                float ymax = boxes[i, j, 2];
                float xmax = boxes[i, j, 3];

                int value = Convert.ToInt32(classes[i, j]);
                CatalogItem catalogItem = _catalog.FirstOrDefault(item => item.Id == value);
                // BUGFIX: FirstOrDefault returns null for a class id missing from the
                // catalog; fall back to the numeric id instead of dereferencing null.
                string displayName = catalogItem?.DisplayName ?? value.ToString();
                editor.AddBox(xmin, xmax, ymin, ymax, $"{displayName} : {(scores[i, j] * 100).ToString("0")}%");
            }
        }
    }
}
/// <summary>
/// Initializes the TensorFlow wrapper: resolves default catalog/model paths,
/// loads the label catalog, imports the frozen graph, opens a session, and runs
/// one inference on the configured input image.
/// NOTE(review): the inference result is discarded — presumably a warm-up run;
/// confirm callers do not expect the constructor to produce output.
/// </summary>
public TFWrapper()
{
    //options.Parse(args);
    // Fall back to downloaded defaults when paths were not configured.
    if (_catalogPath == null) {
        _catalogPath = DownloadDefaultTexts(_currentDir);
    }
    if (_modelPath == null) {
        _modelPath = DownloadDefaultModel(_currentDir);
    }
    _catalog = CatalogUtil.ReadCatalogItems(_catalogPath);
    var fileTuples = new List <(string input, string output)>() { (_input, _output) };

    graph = new TFGraph();
    var model = File.ReadAllBytes(_modelPath);
    graph.Import(new TFBuffer(model));
    session = new TFSession(graph);

    var runner = session.GetRunner();
    // Tensor names follow the TF object-detection API convention.
    runner
        .AddInput(graph["image_tensor"][0], ImageUtil.CreateTensorFromImageFile(fileTuples[0].input, TFDataType.UInt8))
        .Fetch(
            graph["detection_boxes"][0],
            graph["detection_scores"][0],
            graph["detection_classes"][0],
            graph["num_detections"][0]);
    // FIX: removed the redundant "TFTensor[] output = null;" pre-assignment and the
    // unused local — the call's side effect (running the session) is what matters here.
    runner.Run();
}
// Use this for initialization.
// Unity entry point: loads the label catalog and frozen TF graph, configures the
// GUI style used to draw labels/boxes, then starts detection work on a
// background thread.
void Start()
{
#if UNITY_ANDROID
    // Native TF bindings must be initialized before any TensorFlow call on Android.
    TensorFlowSharp.Android.NativeBinding.Init();
#endif
    // Pixel buffer for one INPUT_SIZE x INPUT_SIZE frame, 3 bytes per pixel (RGB).
    pixels = new byte[INPUT_SIZE * INPUT_SIZE * 3];
    _catalog = CatalogUtil.ReadCatalogItems(labelMap.text);

    graph = new TFGraph();
    graph.Import(model.bytes);
    session = new TFSession(graph);

    // Style of the labels and boxes drawn over detected objects.
    style.normal.background = tex;
    style.alignment = TextAnchor.UpperCenter;
    style.fontSize = 80;
    style.fontStyle = FontStyle.Bold;
    style.contentOffset = new Vector2(0, 50);
    style.normal.textColor = objectColor;

    // Begin our heavy work on a new thread.
    _thread = new Thread(ThreadedWork);
    _thread.Start();
}
/// <summary>
/// Run the ExampleObjectDetection util from command line. Following options are available:
/// input_image - optional, the path to the image for processing (the default is 'test_images/input.jpg')
/// output_image - optional, the path where the image with detected objects will be saved (the default is 'test_images/output.jpg')
/// catalog - optional, the path to the '*.pbtxt' file (by default, 'mscoco_label_map.pbtxt' been loaded)
/// model - optional, the path to the '*.pb' file (by default, 'frozen_inference_graph.pb' model been used, but you can download any other from here
/// https://github.com/tensorflow/models/blob/master/object_detection/g3doc/detection_model_zoo.md or train your own)
///
/// for instance,
/// ExampleObjectDetection --input_image="/demo/input.jpg" --output_image="/demo/output.jpg" --catalog="/demo/mscoco_label_map.pbtxt" --model="/demo/frozen_inference_graph.pb"
/// </summary>
/// <param name="args">Command-line arguments; see the option list above.</param>
/// <param name="logger">Logger used for debug tracing of each pipeline stage.</param>
public static void Main <T> (string [] args, ILogger <T> logger)
{
    logger.LogDebug("tensorflow object detection started");
    options.Parse(args);
    // Fall back to downloaded defaults when paths were not supplied on the command line.
    if (_catalogPath == null) {
        _catalogPath = DownloadDefaultTexts(_currentDir);
    }
    if (_modelPath == null) {
        _modelPath = DownloadDefaultModel(_currentDir);
    }
    logger.LogDebug("reading catalog items");
    _catalog = CatalogUtil.ReadCatalogItems(_catalogPath);
    logger.LogDebug("catalog items read");
    logger.LogDebug("input: {0} output: {1}", _input, _output);
    var fileTuples = new List <(string input, string output)> () { (_input, _output) };
    string modelFile = _modelPath;
    logger.LogDebug("running tensorflow");
    using (var graph = new TFGraph())
    {
        logger.LogDebug("tensorflow graph created");
        // Import the frozen inference graph (*.pb) into a fresh graph/session.
        var model = File.ReadAllBytes(modelFile);
        graph.Import(new TFBuffer(model));
        logger.LogDebug("tensorflow model imported");
        using (var session = new TFSession(graph))
        {
            foreach (var tuple in fileTuples)
            {
                var tensor = ImageUtil.CreateTensorFromImageFile(tuple.input, TFDataType.UInt8);
                var runner = session.GetRunner();
                // Tensor names follow the TF object-detection API convention.
                runner
                    .AddInput(graph ["image_tensor"] [0], tensor)
                    .Fetch(
                        graph ["detection_boxes"] [0],
                        graph ["detection_scores"] [0],
                        graph ["detection_classes"] [0],
                        graph ["num_detections"] [0]);
                logger.LogDebug("before session run");
                var output = runner.Run();
                logger.LogDebug("session run finished");
                // Fetched in the same order as above: boxes, scores, classes, count.
                var boxes = (float [, , ])output [0].GetValue(jagged: false);
                var scores = (float [, ])output [1].GetValue(jagged: false);
                var classes = (float [, ])output [2].GetValue(jagged: false);
                var num = (float [])output [3].GetValue(jagged: false);
                logger.LogDebug("drawing boxes");
                DrawBoxes(boxes, scores, classes, tuple.input, tuple.output, MIN_SCORE_FOR_OBJECT_HIGHLIGHTING);
                logger.LogDebug("boxes are drawn");
            }
        }
    }
    logger.LogDebug("tensorflow object detection completed");
}
/// <summary>
/// The utility processes the image and produces output image highlighting detected objects on it.
/// You need to proceed following steps to get the example working:
/// 1. Download and unzip one of trained models from
/// https://github.com/tensorflow/models/blob/master/object_detection/g3doc/detection_model_zoo.md
///
/// for instance 'faster_rcnn_inception_resnet_v2_atrous_coco'
/// 2. Download mscoco_label_map.pbtxt from
/// https://github.com/tensorflow/models/blob/master/object_detection/data/mscoco_label_map.pbtxt
///
/// 3. Run the ExampleObjectDetection util from command line specifying input_image, output_image, catalog and model options
/// where input_image - the path to the image for processing
/// output_image - the path where the image with detected objects will be saved
/// catalog - the path to the 'mscoco_label_map.pbtxt' file (see 2)
/// model - the path to the 'frozen_inference_graph.pb' file (see 1)
///
/// for instance,
/// ExampleObjectDetection --input_image="/demo/input.jpg" --output_image="/demo/output.jpg" --catalog="/demo/mscoco_label_map.pbtxt" --model="/demo/frozen_inference_graph.pb"
/// </summary>
/// <param name="args">Command-line arguments; all four options are required in this variant.</param>
static void Main(string [] args)
{
    options.Parse(args);
    // All four options are mandatory here — fail fast with a usage hint.
    if (_input == null) {
        throw new ArgumentException("Missing required option --input_image=");
    }
    if (_output == null) {
        throw new ArgumentException("Missing required option --output_image=");
    }
    if (_catalogPath == null) {
        throw new ArgumentException("Missing required option --catalog=");
    }
    if (_modelPath == null) {
        throw new ArgumentException("Missing required option --model=");
    }
    _catalog = CatalogUtil.ReadCatalogItems(_catalogPath);
    var fileTuples = new List <(string input, string output)> () { (_input, _output) };
    string modelFile = _modelPath;
    using (var graph = new TFGraph())
    {
        // Import the frozen inference graph (*.pb) into a fresh graph/session.
        var model = File.ReadAllBytes(modelFile);
        graph.Import(new TFBuffer(model));
        using (var session = new TFSession(graph))
        {
            foreach (var tuple in fileTuples)
            {
                var tensor = ImageUtil.CreateTensorFromImageFile(tuple.input, TFDataType.UInt8);
                var runner = session.GetRunner();
                // Tensor names follow the TF object-detection API convention.
                runner
                    .AddInput(graph ["image_tensor"] [0], tensor)
                    .Fetch(
                        graph ["detection_boxes"] [0],
                        graph ["detection_scores"] [0],
                        graph ["detection_classes"] [0],
                        graph ["num_detections"] [0]);
                var output = runner.Run();
                // Fetched in the same order as above: boxes, scores, classes, count.
                var boxes = (float [, , ])output [0].GetValue(jagged: false);
                var scores = (float [, ])output [1].GetValue(jagged: false);
                var classes = (float [, ])output [2].GetValue(jagged: false);
                var num = (float [])output [3].GetValue(jagged: false);
                DrawBoxes(boxes, scores, classes, tuple.input, tuple.output, MIN_SCORE_FOR_OBJECT_HIGHLIGHTING);
            }
        }
    }
}
/// <summary>
/// Runs object detection over the configured input image, prints each detection
/// with its timing, draws labeled rectangles with OpenCV, and shows the result
/// in a window until a key is pressed.
/// </summary>
static void Main()
{
    var img = Cv2.ImRead(_input);
    Catalog = CatalogUtil.ReadCatalogItems(_catalogPath);
    var fileTuples = new List <(string input, string output)> { (_input, _output) };
    string modelFile = _modelPath;
    using (var graph = new TFGraph())
    {
        // Import the frozen inference graph (*.pb) into a fresh graph/session.
        var model = File.ReadAllBytes(modelFile);
        graph.Import(new TFBuffer(model));
        using (var session = new TFSession(graph))
        {
            foreach (var tuple in fileTuples)
            {
                var tensor = ImageUtil.CreateTensorFromImageFile(tuple.input, TFDataType.UInt8);
                var runner = session.GetRunner();
                // Tensor names follow the TF object-detection API convention.
                runner
                    .AddInput(graph["image_tensor"][0], tensor)
                    .Fetch(
                        graph["detection_boxes"][0],
                        graph["detection_scores"][0],
                        graph["detection_classes"][0],
                        graph["num_detections"][0]);

                // Time only the session run itself.
                Stopwatch sw = new Stopwatch();
                sw.Start();
                var output = runner.Run();
                sw.Stop();
                Console.WriteLine($"Runtime:{sw.ElapsedMilliseconds} ms");

                // Fetched in the same order as above: boxes, scores, classes, count.
                var boxes = (float[, , ])output[0].GetValue(jagged: false);
                var scores = (float[, ])output[1].GetValue(jagged: false);
                var classes = (float[, ])output[2].GetValue(jagged: false);
                var num = (float[])output[3].GetValue(jagged: false);

                #region show image
                for (int i = 0; i < boxes.GetLength(1); i++)
                {
                    if (scores[0, i] > 0.5)
                    {
                        var idx = Convert.ToInt32(classes[0, i]);
                        // Box layout is [ymin, xmin, ymax, xmax] in normalized
                        // coordinates; scale to pixel positions on the image.
                        var x1 = (int)(boxes[0, i, 1] * img.Width);
                        var y1 = (int)(boxes[0, i, 0] * img.Height);
                        var x2 = (int)(boxes[0, i, 3] * img.Width);
                        var y2 = (int)(boxes[0, i, 2] * img.Height);

                        // BUGFIX: was Catalog.First(...), which throws on an unknown
                        // class id and made the null fallback below unreachable;
                        // FirstOrDefault lets the numeric-id fallback actually run.
                        var catalog = Catalog.FirstOrDefault(x => x.Id == idx);
                        string label = $"{(catalog == null ? idx.ToString() : catalog.DisplayName)}: {scores[0, i] * 100:0.00}%";
                        Console.WriteLine($"{label} {x1} {y1} {x2} {y2}");

                        Cv2.Rectangle(img, new Rect(x1, y1, x2 - x1, y2 - y1), Scalar.Red, 2);
                        int baseline;
                        var textSize = Cv2.GetTextSize(label, HersheyFonts.HersheyTriplex, 0.5, 1, out baseline);
                        textSize.Height = textSize.Height + baseline / 2;
                        // Draw the label above the box unless it would run off the top of the image.
                        var y = y1 - textSize.Height < 0 ? y1 + textSize.Height : y1;
                        Cv2.Rectangle(img, new Rect(x1, y - textSize.Height, textSize.Width, textSize.Height + baseline / 2), Scalar.Red, Cv2.FILLED);
                        Cv2.PutText(img, label, new Point(x1, y), HersheyFonts.HersheyTriplex, 0.5, Scalar.Black);
                    }
                }
                #endregion
            }
        }
    }

    // Display the annotated image until any key is pressed.
    using (new Window("image", img))
    {
        Cv2.WaitKey();
    }
}