private void DrawToTexture(Texture texture, Annotation[] annotations, Texture2D results)
{
    // Copy the source pixels into the drawable texture.
    if (texture is Texture2D)
    {
        Texture2D t2d = texture as Texture2D;
        _drawableTexture.SetPixels32(t2d.GetPixels32());
    }
    else
    {
        // For other texture types, copy through a temporary readable texture.
        // GraphicsFormat and TextureCreationFlags live in UnityEngine.Experimental.Rendering.
        Texture2D tmp = new Texture2D(texture.width, texture.height, GraphicsFormat.R8G8B8A8_SRGB, texture.mipmapCount, TextureCreationFlags.None);
        Graphics.CopyTexture(texture, tmp);
        _drawableTexture.SetPixels32(tmp.GetPixels32());
        Destroy(tmp); // release the temporary texture to avoid leaking it
    }

    foreach (Annotation annotation in annotations)
    {
        // Annotation rectangles are normalized [left, top, right, bottom]; scale to pixel coordinates.
        float left = annotation.Rectangle[0] * _drawableTexture.width;
        float top = annotation.Rectangle[1] * _drawableTexture.height;
        float right = annotation.Rectangle[2] * _drawableTexture.width;
        float bottom = annotation.Rectangle[3] * _drawableTexture.height;
        Rect scaledLocation = new Rect(left, top, right - left, bottom - top);

        // Flip vertically: Texture2D pixel data has its origin at the bottom-left corner.
        scaledLocation.y = texture.height - scaledLocation.y;
        scaledLocation.height = -scaledLocation.height;

        NativeImageIO.DrawRect(_drawableTexture, scaledLocation, Color.red);
    }

    // Upload the modified pixels of the texture that was drawn into.
    _drawableTexture.Apply();
}
private static void DrawToTexture(Texture texture, Annotation[] annotations, Texture2D results)
{
    Debug.Assert(
        texture.width == results.width && texture.height == results.height,
        "Input texture and output texture must have the same width & height.");

    Color32[] pixels;
    if (texture is Texture2D)
    {
        Texture2D t2d = texture as Texture2D;
        pixels = t2d.GetPixels32();
    }
    else if (texture is WebCamTexture)
    {
        WebCamTexture wct = texture as WebCamTexture;
        pixels = wct.GetPixels32();
    }
    else
    {
        Texture2D tmp = new Texture2D(texture.width, texture.height, GraphicsFormat.R8G8B8A8_SRGB, texture.mipmapCount, TextureCreationFlags.None);
        Graphics.CopyTexture(texture, tmp);
        pixels = tmp.GetPixels32();
        Destroy(tmp);
    }

    foreach (Annotation annotation in annotations)
    {
        float left = annotation.Rectangle[0] * texture.width;
        float top = annotation.Rectangle[1] * texture.height;
        float right = annotation.Rectangle[2] * texture.width;
        float bottom = annotation.Rectangle[3] * texture.height;
        Rect scaledLocation = new Rect(left, top, right - left, bottom - top);
        scaledLocation.y = texture.height - scaledLocation.y;
        scaledLocation.height = -scaledLocation.height;

        NativeImageIO.DrawRect(pixels, texture.width, texture.height, scaledLocation, Color.red);
    }

    results.SetPixels32(pixels);
    results.Apply();
}
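// The methods above rely on an Annotation container that is not defined in this section.
// Based on how it is used here (a normalized [left, top, right, bottom] rectangle plus a
// display label), a minimal sketch could look like the following; the exact original
// definition may differ.
public class Annotation
{
    // Normalized coordinates in the range [0, 1]: { left, top, right, bottom }.
    public float[] Rectangle;

    // Text shown for the detection, e.g. "person:(87.50%)".
    public String Label;
}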
public static void DrawResults(Texture2D image, MultiboxGraph.Result[] results, float scoreThreshold, bool flipUpSideDown = false)
{
    // Keep only the results whose score passes the threshold.
    Annotation[] annotations = FilterResults(results, scoreThreshold);

    Color color = new Color(1.0f, 0, 0); // draw the bounding boxes in red
    for (int i = 0; i < annotations.Length; i++)
    {
        Rect[] rects = ScaleLocation(annotations[i].Rectangle, image.width, image.height, flipUpSideDown);
        foreach (Rect r in rects)
        {
            NativeImageIO.DrawRect(image, r, color);
        }
    }
    image.Apply();
}
private void RecognizeAndUpdateText(Texture2D texture)
{
    if (_mobilenet == null)
    {
        _displayMessage = "Waiting for mobile net model to be loaded...";
        return;
    }

    // Run the SSD MobileNet recognition and time it.
    Stopwatch watch = Stopwatch.StartNew();
    CocoSsdMobilenet.RecognitionResult[] results = _mobilenet.Recognize(texture, true, false, 0.5f);
    watch.Stop();

    // (Re)create the drawable texture if the input size changed.
    if (drawableTexture == null || drawableTexture.width != texture.width || drawableTexture.height != texture.height)
    {
        drawableTexture = new Texture2D(texture.width, texture.height, TextureFormat.ARGB32, false);
    }
    drawableTexture.SetPixels(texture.GetPixels());

    // Convert recognition results into annotations with a "label:(score%)" caption.
    Annotation[] annotations = new Annotation[results.Length];
    for (int i = 0; i < results.Length; i++)
    {
        Annotation annotation = new Annotation();
        annotation.Rectangle = results[i].Rectangle;
        annotation.Label = String.Format("{0}:({1:0.00}%)", results[i].Label, results[i].Score * 100);
        annotations[i] = annotation;
    }

    String objectNames = String.Empty;
    foreach (Annotation annotation in annotations)
    {
        // Scale the normalized rectangle to pixel coordinates and flip it vertically,
        // since Texture2D pixel data has its origin at the bottom-left corner.
        float left = annotation.Rectangle[0] * drawableTexture.width;
        float top = annotation.Rectangle[1] * drawableTexture.height;
        float right = annotation.Rectangle[2] * drawableTexture.width;
        float bottom = annotation.Rectangle[3] * drawableTexture.height;
        Rect scaledLocation = new Rect(left, top, right - left, bottom - top);
        scaledLocation.y = texture.height - scaledLocation.y;
        scaledLocation.height = -scaledLocation.height;

        NativeImageIO.DrawRect(drawableTexture, scaledLocation, Color.red);
        objectNames = objectNames + annotation.Label + ";";
    }
    drawableTexture.Apply();

    if (!String.IsNullOrEmpty(objectNames))
    {
        objectNames = String.Format("({0})", objectNames);
    }

    String resStr = String.Empty;
    if (results != null)
    {
        resStr = String.Format(
            "{0} objects detected{1}. Recognition completed in {2} milliseconds.",
            annotations.Length,
            objectNames,
            watch.ElapsedMilliseconds);
    }
    _displayMessage = resStr;
}
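// The normalized-rectangle-to-pixel conversion (including the vertical flip) is repeated in
// each of the drawing methods above. A small helper along these lines could consolidate it;
// the name ScaleAndFlipRect is illustrative and not part of the original code.
private static Rect ScaleAndFlipRect(float[] normalizedRect, int width, int height)
{
    // Scale the normalized [left, top, right, bottom] values to pixel coordinates.
    float left = normalizedRect[0] * width;
    float top = normalizedRect[1] * height;
    float right = normalizedRect[2] * width;
    float bottom = normalizedRect[3] * height;
    Rect scaled = new Rect(left, top, right - left, bottom - top);

    // Flip vertically because Texture2D pixel data has its origin at the bottom-left corner.
    scaled.y = height - scaled.y;
    scaled.height = -scaled.height;
    return scaled;
}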