/// <summary>
/// Converts a GDI+ <see cref="Bitmap"/> into a Dlib <see cref="Array2D{T}"/>.
/// </summary>
/// <typeparam name="T">Pixel struct type of the destination image.</typeparam>
/// <param name="bitmap">Source bitmap; its <c>PixelFormat</c> must be a supported format.</param>
/// <returns>A newly allocated <see cref="Array2D{T}"/> holding the converted pixels. Caller owns and must dispose it.</returns>
/// <exception cref="NotSupportedException">
/// Thrown when the bitmap's pixel format is unsupported, or when no conversion
/// exists from that format to the destination image type.
/// </exception>
public static Array2D <T> ToArray2D <T>(this Bitmap bitmap)
    where T : struct
{
    var format = bitmap.PixelFormat;
    if (!OptimumConvertImageInfos.TryGetValue(format, out var infos))
    {
        throw new NotSupportedException($"{format} is not support");
    }
    if (!OptimumChannels.TryGetValue(format, out var channels))
    {
        throw new NotSupportedException($"{format} is not support");
    }

    var width = bitmap.Width;
    var height = bitmap.Height;

    var array = new Array2D <T>(height, width);
    try
    {
        // Find a conversion entry matching the destination image type.
        var info = infos.FirstOrDefault(i => i.Type == array.ImageType);
        if (info == null)
        {
            throw new NotSupportedException($"Not support converting from {format} to {array.ImageType}");
        }

        ToNative(bitmap, info.Type, array.NativePtr, info.RgbReverse, channels);
        return array;
    }
    catch
    {
        // Dispose the native buffer on any failure, but rethrow the ORIGINAL
        // exception. (The previous implementation threw a bare
        // NotSupportedException from a finally block, which replaced the
        // informative in-flight exception with a message-less one.)
        array.Dispose();
        throw;
    }
}
/// <summary>
/// Converts a WPF <see cref="WriteableBitmap"/> into a Dlib <see cref="Array2D{T}"/>.
/// </summary>
/// <typeparam name="T">Pixel struct type of the destination image.</typeparam>
/// <param name="bitmap">Source bitmap; its <c>Format</c> must be a supported format.</param>
/// <returns>A newly allocated <see cref="Array2D{T}"/> holding the converted pixels. Caller owns and must dispose it.</returns>
/// <exception cref="NotSupportedException">
/// Thrown when the bitmap's pixel format is unsupported, or when no conversion
/// exists from that format to the destination image type.
/// </exception>
public static Array2D <T> ToArray2D <T>(this WriteableBitmap bitmap)
    where T : struct
{
    var format = bitmap.Format;
    if (!OptimumConvertImageInfos.TryGetValue(format, out var info))
    {
        throw new NotSupportedException($"{format} is not support");
    }
    if (!OptimumChannels.TryGetValue(format, out var channels))
    {
        throw new NotSupportedException($"{format} is not support");
    }

    var width = bitmap.PixelWidth;
    var height = bitmap.PixelHeight;

    var array = new Array2D <T>(height, width);
    try
    {
        // Fail with a descriptive message when the destination type does not
        // match. (The previous implementation silently fell through to a
        // finally block that threw a bare, message-less NotSupportedException.)
        if (array.ImageType != info.Type)
        {
            throw new NotSupportedException($"Not support converting from {format} to {array.ImageType}");
        }

        ToNative(bitmap, info.Type, array.NativePtr, info.RgbReverse, channels);
        return array;
    }
    catch
    {
        // Dispose the native buffer on failure and rethrow the original
        // exception instead of masking it.
        array.Dispose();
        throw;
    }
}
/// <summary>
/// Loads the image at <paramref name="index"/> from the dataset metadata and
/// shows it in the display together with its annotation overlays.
/// </summary>
/// <param name="index">Zero-based index into the metadata image list; out-of-range values are ignored.</param>
private void LoadImage(uint index)
{
    if (index >= this._Metadata.Images.Count)
    {
        return;
    }

    this._ImagePos = index;

    Array2D <RgbPixel> img = null;
    try
    {
        this._Display.ClearOverlay();
        try
        {
            img = Dlib.LoadImage <RgbPixel>(this._Metadata.Images[(int)index].FileName);
            this.Title = $"{this._Metadata.Name} #{index}: {this._Metadata.Images[(int)index].FileName}";
        }
        catch (Exception e)
        {
            Dlib.MessageBox("Error loading image", e.Message);
            // BUGFIX: bail out after a failed load. Without this return, img is
            // still null and the calls below would throw NullReferenceException.
            // (LoadImageAndSetSize already does this.)
            return;
        }

        if (this._DisplayEquializedImage)
        {
            Dlib.EqualizeHistogram(img);
        }

        this._Display.SetImage(img);

        using (var overlays = new EnumerableDisposer <ImageDisplay.OverlayRect>(this.GetOverlays(this._Metadata.Images[(int)index], this._ColorMapper)))
        {
            this._Display.AddOverlay(overlays.Collection);
        }
    }
    finally
    {
        // The display copies the image; the native buffer is always released here.
        img?.Dispose();
    }
}
//--------------------------------------------------------------------------------------------------------
/// <summary>
/// Pose estimation combining an OpenCV cascade face detector with the Dlib 68-point
/// shape predictor. Faster than the pure Dlib68 path, but with a lower face-detection rate.
/// </summary>
/// <param name="threadNo">Index of the worker thread this call runs on.</param>
/// <param name="est_pos">Estimated head position.</param>
/// <param name="est_rot">Estimated head rotation (Euler angles).</param>
/// <returns>True when the estimation succeeded; false when any step threw.</returns>
private bool Mixed(int threadNo, out Vector3 est_pos, out Vector3 est_rot) {
    Mat image_r = new Mat();
    Mat image = new Mat();
    try {
        // Grab one frame under the capture lock and reject a buffer another
        // thread has already claimed (identified by its native data pointer).
        lock (lock_capture) {
            image_r = caputure.Read();
            if (image_r.Data == null) {
                throw new NullReferenceException("capture is null");
            }
            if (ptr.Contains(image_r.Data)) {
                throw new InvalidOperationException("taken same data");
            } else {
                ptr[threadNo] = image_r.Data;
            }
        }

        // Downscale by the configured factor (1 = work on a full-size clone).
        if (resolution == 1) {
            image = image_r.Clone();
        } else {
            Cv2.Resize(image_r, image, new Size(image_r.Cols / resolution, image_r.Rows / resolution));
        }
        GC.KeepAlive(image_r);

        // Cascade face detection; bail out when no face is present.
        var faces = cascade.DetectMultiScale(image);
        if (!faces.Any()) {
            throw new InvalidOperationException("this contains no elements");
        }

        // Copy the OpenCV pixel buffer into a Dlib image via the per-thread byte array.
        Array2D <RgbPixel> array2D = new Array2D <RgbPixel>();
        lock (lock_imagebytes[threadNo]) {
            Marshal.Copy(image.Data, bytes[threadNo], 0, bytes[threadNo].Length);
            array2D = Dlib.LoadImageData <RgbPixel>(bytes[threadNo], (uint)image.Height, (uint)image.Width, (uint)(image.Width * image.ElemSize()));
        }

        // First detected face, converted to a Dlib rectangle.
        var rectangles = new Rectangle(faces.First().Left, faces.First().Top, faces.First().Right, faces.First().Bottom);

        // Run the 68-landmark predictor and publish the result under the landmark lock.
        DlibDotNet.Point[] points = new DlibDotNet.Point[68];
        using (FullObjectDetection shapes = shape.Detect(array2D, rectangles)) {
            for (uint i = 0; i < 68; i++) {
                points[i] = shapes.GetPart(i);
            }
            lock (lock_landmarks) {
                landmark_detection = points;
            }
        }
        // NOTE(review): array2D is only disposed on this success path — an exception
        // thrown after its creation leaks the native image until finalization.
        array2D.Dispose();

        // Six reference landmarks for solvePnP — presumably nose tip (30), chin (8),
        // eye corners (45/36) and mouth corners (54/48) of the standard 68-point
        // model; confirm against model_points_mat.
        Point2f[] image_points = new Point2f[6];
        image_points[0] = new Point2f(points[30].X, points[30].Y);
        image_points[1] = new Point2f(points[8].X, points[8].Y);
        image_points[2] = new Point2f(points[45].X, points[45].Y);
        image_points[3] = new Point2f(points[36].X, points[36].Y);
        image_points[4] = new Point2f(points[54].X, points[54].Y);
        image_points[5] = new Point2f(points[48].X, points[48].Y);
        var image_points_mat = new Mat(image_points.Length, 1, MatType.CV_32FC2, image_points);

        // Eye-contour landmarks for blink tracking.
        // NOTE(review): the right-eye writes start at slot [0] but the left-eye
        // writes start at slot [1], and neither side fills the same slots — looks
        // inconsistent; confirm this is what BlinkTracker expects.
        eye_point_R[threadNo][0] = points[42];
        eye_point_L[threadNo][1] = points[36];
        eye_point_R[threadNo][2] = points[43];
        eye_point_L[threadNo][2] = points[38];
        eye_point_R[threadNo][3] = points[47];
        eye_point_L[threadNo][3] = points[40];
        eye_point_R[threadNo][4] = points[44];
        eye_point_L[threadNo][4] = points[37];
        eye_point_R[threadNo][5] = points[46];
        eye_point_L[threadNo][5] = points[41];

        // Solve the 3D pose: translation (tvec) and rotation (rvec -> 3x3 matrix
        // via Rodrigues), copied out into the per-thread double buffers.
        Mat rvec_mat = new Mat();
        Mat tvec_mat = new Mat();
        Mat projMatrix_mat = new Mat();
        Cv2.SolvePnP(model_points_mat, image_points_mat, camera_matrix_mat, dist_coeffs_mat, rvec_mat, tvec_mat);
        Marshal.Copy(tvec_mat.Data, pos_double[threadNo], 0, 3);
        Cv2.Rodrigues(rvec_mat, projMatrix_mat);
        Marshal.Copy(projMatrix_mat.Data, proj[threadNo], 0, 9);

        // X is negated — presumably to mirror into the on-screen coordinate
        // system; confirm against the consumer of est_pos.
        est_pos.x = -(float)pos_double[threadNo][0];
        est_pos.y = (float)pos_double[threadNo][1];
        est_pos.z = (float)pos_double[threadNo][2];
        est_rot = RotMatToQuatanion(proj[threadNo]).eulerAngles;

        if (blink_tracking) {
            BlinkTracker(threadNo, eye_point_L[threadNo], eye_point_R[threadNo], est_rot);
        }
        if (eye_tracking) {
            EyeTracker(threadNo, image, points.Skip(42).Take(6), points.Skip(36).Take(6));
        }

        // NOTE(review): these Mats are not disposed when an exception is thrown
        // earlier in the try block.
        image_points_mat.Dispose();
        rvec_mat.Dispose();
        tvec_mat.Dispose();
        projMatrix_mat.Dispose();
        GC.KeepAlive(image);
    } catch (Exception e) {
        // Any failure falls back to the last known pose and reports false.
        Debug.Log(e.ToString());
        est_pos = pos;
        est_rot = rot;
        if (image.IsEnabledDispose) {
            image.Dispose();
        }
        return(false);
    }
    lock (lock_imagebytes[threadNo]) {
        if (image.IsEnabledDispose) {
            image.Dispose();
        }
    }
    return(true);
}
//--------------------------------------------------------------------------------------------------------
/// <summary>
/// Face detection and pose estimation using the Dlib 5-point landmark model.
/// </summary>
/// <param name="threadNo">Thread number.</param>
/// <param name="est_pos">Estimated position.</param>
/// <param name="est_rot">Estimated rotation.</param>
/// <returns>Whether it could be estimated.</returns>
private bool Dlib5(int threadNo, out Vector3 est_pos, out Vector3 est_rot) {
    // Start from the last known rotation; only the Z (roll) component is
    // re-estimated below.
    est_rot = rot;
    Mat image_r = new Mat();
    Array2D <RgbPixel> array2D = new Array2D <RgbPixel>();
    Mat image = new Mat();
    try {
        // Grab one frame under the capture lock and reject a buffer another
        // thread has already claimed (identified by its native data pointer).
        lock (lock_capture) {
            image_r = caputure.Read();
            if (image_r.Data == null) {
                throw new NullReferenceException("capture is null");
            }
            if (ptr.Contains(image_r.Data)) {
                throw new InvalidOperationException("taken same data");
            } else {
                ptr[threadNo] = image_r.Data;
            }
        }

        // Downscale by the configured factor (1 = work on a full-size clone).
        if (resolution == 1) {
            image = image_r.Clone();
        } else {
            Cv2.Resize(image_r, image, new Size(image_r.Cols / resolution, image_r.Rows / resolution));
        }
        GC.KeepAlive(image_r);

        // Copy the OpenCV pixel buffer into a Dlib image via the per-thread byte array.
        lock (lock_imagebytes[threadNo]) {
            Marshal.Copy(image.Data, bytes[threadNo], 0, bytes[threadNo].Length);
            array2D = Dlib.LoadImageData <RgbPixel>(bytes[threadNo], (uint)image.Height, (uint)image.Width, (uint)(image.Width * image.ElemSize()));
        }

        // In un_safe mode every thread shares detector[0]; otherwise each thread
        // uses its own detector instance.
        Rectangle rectangles = default;
        if (un_safe) {
            rectangles = detector[0].Operator(array2D).FirstOrDefault();
        } else {
            rectangles = detector[threadNo].Operator(array2D).FirstOrDefault();
        }

        DlibDotNet.Point[] points = new DlibDotNet.Point[5];
        if (rectangles == default) {
            throw new InvalidOperationException("this contains no elements.");
        }
        using (FullObjectDetection shapes = shape.Detect(array2D, rectangles)) {
            for (uint i = 0; i < 5; i++) {
                points[i] = shapes.GetPart(i);
            }
        }

        // Normalized position: points[4] relative to the image centre (X mirrored);
        // depth from the spread between points[0] and points[2], minus a
        // calibration offset.
        est_pos.x = -(image.Width / 2 - points[4].X) / (float)image.Width;
        est_pos.y = (image.Height / 2 - points[4].Y) / (float)image.Height;
        est_pos.z = (points[0].X - points[2].X) / (float)image.Width + (points[0].Y - points[2].Y) / (float)image.Height - z_offset;

        // Roll angle from the two outer landmarks.
        // NOTE(review): float arithmetic and Mathf.Atan2 never throw
        // DivideByZeroException in C#, so this catch branch looks unreachable —
        // confirm before relying on the ±90° fallback.
        try {
            est_rot.z = Mathf.Rad2Deg * Mathf.Atan2(points[0].Y - points[2].Y, points[0].X - points[2].X);
        } catch (DivideByZeroException) {
            est_rot.z = points[0].Y - points[2].Y < 0 ? -90 : 90;
        }

        if (debug_face_image) {
            DetectDebug(threadNo, image, points: points);
        }
        GC.KeepAlive(image);
    } catch (Exception e) {
        // Any failure falls back to the last known position (rotation keeps the
        // value assigned at the top of the method) and reports false.
        Debug.Log(e.ToString());
        est_pos = pos;
        if (array2D.IsEnableDispose) {
            array2D.Dispose();
        }
        if (image.IsEnabledDispose) {
            image.Dispose();
        }
        return(false);
    }
    if (array2D.IsEnableDispose) {
        array2D.Dispose();
    }
    lock (lock_imagebytes[threadNo]) {
        if (image.IsEnabledDispose) {
            image.Dispose();
        }
    }
    return(true);
}
/// <summary>
/// Loads the image at <paramref name="index"/>, resizes the window to fit it
/// (clamped to the screen), and refreshes the display with its annotation overlays.
/// </summary>
/// <param name="index">Zero-based index into the metadata image list; out-of-range values are ignored.</param>
private void LoadImageAndSetSize(uint index)
{
    if (index >= this._Metadata.Images.Count)
    {
        return;
    }

    this._ImagePos = index;

    Array2D <RgbPixel> img = null;
    try
    {
        this._Display.ClearOverlay();

        var entry = this._Metadata.Images[(int)index];
        try
        {
            img = Dlib.LoadImage <RgbPixel>(entry.FileName);
            this.Title = $"{this._Metadata.Name} #{index}: {entry.FileName}";
        }
        catch (Exception e)
        {
            Dlib.MessageBox("Error loading image", e.Message);
            return;
        }

        this.GetDisplaySize(out var screenWidth, out var screenHeight);

        // Window size that fits the image plus a small border...
        var windowWidth = this._Display.Left + img.Columns + 4;
        var windowHeight = this._Display.Top + img.Rows + 4;

        // ...kept at a sensible minimum...
        windowWidth = Math.Max(windowWidth, 300);
        windowHeight = Math.Max(windowHeight, 300);

        // ...and never allowed to grow past the screen (100px margin).
        if (windowWidth > 100 + screenWidth)
        {
            windowWidth = (int)(screenWidth - 100);
        }
        if (windowHeight > 100 + screenHeight)
        {
            windowHeight = (int)(screenHeight - 100);
        }

        this.SetSize(windowWidth, windowHeight);

        if (this._DisplayEquializedImage)
        {
            Dlib.EqualizeHistogram(img);
        }

        this._Display.SetImage(img);

        using (var overlays = new EnumerableDisposer <ImageDisplay.OverlayRect>(this.GetOverlays(entry, this._ColorMapper)))
        {
            this._Display.AddOverlay(overlays.Collection);
        }
    }
    finally
    {
        // The display copies the image; the native buffer is always released here.
        img?.Dispose();
    }
}