/// <summary>
/// Runs SqueezeNet v1.1 classification on a BGR image and fills
/// <paramref name="clsScores"/> with the resulting class probabilities.
/// Any previous contents of <paramref name="clsScores"/> are replaced.
/// </summary>
/// <param name="bgr">Input image in BGR channel order (OpenCV convention).</param>
/// <param name="clsScores">Receives one probability per class from the "prob" output blob.</param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when <paramref name="bgr"/> or <paramref name="clsScores"/> is null.
/// </exception>
private static void DetectSqueezeNet(NcnnDotNet.OpenCV.Mat bgr, List<float> clsScores)
{
    if (bgr == null)
        throw new System.ArgumentNullException(nameof(bgr));
    if (clsScores == null)
        throw new System.ArgumentNullException(nameof(clsScores));

    using var squeezeNet = new Net();

    if (Ncnn.IsSupportVulkan)
    {
        squeezeNet.Opt.UseVulkanCompute = true;
    }

    // the ncnn model https://github.com/nihui/ncnn-assets/tree/master/models
    squeezeNet.LoadParam("squeezenet_v1.1.param");
    squeezeNet.LoadModel("squeezenet_v1.1.bin");

    // SqueezeNet v1.1 takes a fixed 227x227 input.
    using var @in = Mat.FromPixelsResize(bgr.Data, PixelType.Bgr, bgr.Cols, bgr.Rows, 227, 227);

    // Mean-only normalization (no scale factors), matching the upstream ncnn example.
    var meanVals = new[] { 104f, 117f, 123f };
    @in.SubstractMeanNormalize(meanVals, null);

    using var ex = squeezeNet.CreateExtractor();
    ex.Input("data", @in);

    using var @out = new Mat();
    ex.Extract("prob", @out);

    // Fix: start from an empty list. The original appended to the caller's list,
    // duplicating scores on reuse, and the Capacity setter below throws
    // ArgumentOutOfRangeException whenever clsScores.Count exceeds @out.W.
    // (The ncnn C++ example uses cls_scores.resize(out.w), i.e. replace semantics.)
    clsScores.Clear();
    clsScores.Capacity = @out.W;
    for (var j = 0; j < @out.W; j++)
    {
        clsScores.Add(@out[j]);
    }
}
/// <summary>
/// Detects faces in the image at <paramref name="imagePath"/> using the
/// CenterFace detector and returns one <see cref="Face"/> per detection.
/// </summary>
/// <param name="imagePath">Path to the image file to analyze.</param>
/// <param name="tolerance">
/// Minimum detection confidence passed to the detector; defaults to 0.5,
/// the value that was previously hard-coded.
/// </param>
/// <returns>The detected faces; empty when no face is found.</returns>
/// <exception cref="System.ArgumentException">
/// Thrown when <paramref name="imagePath"/> is null, empty, or whitespace.
/// </exception>
public List<Face> DetectFaces(string imagePath, float tolerance = 0.5f)
{
    if (string.IsNullOrWhiteSpace(imagePath))
        throw new System.ArgumentException("Image path must be a non-empty string.", nameof(imagePath));

    var faces = new List<Face>();

    using NcnnDotNet.OpenCV.Mat frame = NcnnDotNet.OpenCV.Cv2.ImRead(imagePath);

    // The detector expects RGB input, so convert from OpenCV's BGR ordering.
    using NcnnDotNet.Mat inMat = NcnnDotNet.Mat.FromPixels(frame.Data, NcnnDotNet.PixelType.Bgr2Rgb, frame.Cols, frame.Rows);

    // Iterate the detector results directly; the original .ToArray() copy was unnecessary.
    foreach (FaceInfo detectedFace in _centerFaceDetector.Detect(inMat, frame.Cols, frame.Rows, tolerance))
    {
        // FaceInfo exposes corner coordinates (X1,Y1)-(X2,Y2); Face takes x, y, width, height.
        var x = (int)detectedFace.X1;
        var y = (int)detectedFace.Y1;
        faces.Add(new Face(x, y, (int)detectedFace.X2 - x, (int)detectedFace.Y2 - y, DrawPen));
    }

    return faces;
}