/// <summary>
/// Initializes the processor with default locating and recognition components.
/// </summary>
/// <param name="appId">Application id.</param>
/// <param name="locatorKey">Locator key.</param>
/// <param name="recognizeKey">Recognition key.</param>
/// <param name="useTracking">When true a tracking locator is created; otherwise a detection locator.</param>
public FaceProcessor(string appId, string locatorKey, string recognizeKey, bool useTracking = false)
{
    if (useTracking)
    {
        _locator = LocatorFactory.GetTrackingLocator(appId, locatorKey);
    }
    else
    {
        _locator = LocatorFactory.GetDetectionLocator(appId, locatorKey);
    }
    _recognize = new FaceRecognize(appId, recognizeKey);
}
/// <summary>
/// Creates the SDK components, preloads the face library cache, and starts
/// the webcam capture plus the background frame-processing loop.
/// </summary>
private void Init()
{
    _traking = LocatorFactory.GetTrackingLocator(AppId, FtKey) as FaceTracking;
    _detection = LocatorFactory.GetDetectionLocator(AppId, FdKey) as FaceDetection;
    _recognize = new FaceRecognize(AppId, FrKey);
    _processor = new FaceProcessor(_traking, _recognize);

    // Preload the face library into the cache, keyed by file name without extension.
    if (Directory.Exists(FaceLibraryPath))
    {
        foreach (var file in Directory.GetFiles(FaceLibraryPath))
        {
            // Fix: the previous info.Name.Replace(info.Extension, "") removed every
            // occurrence of the extension text anywhere in the name (e.g.
            // "jpg.photo.jpg" -> ".photo"); strip only the trailing extension.
            _cache.Add(Path.GetFileNameWithoutExtension(file), File.ReadAllBytes(file));
        }
    }

    CvInvoke.UseOpenCL = false;

    _capture = GetWebCamera();
    _capture.Start();

    //Application.Idle += VideoCaptured;  // alternative: capture on idle, no extra thread needed
    //_capture.ImageGrabbed += VideoCaptured;  // do not use this event-based capture
    // The capture loop runs indefinitely, so give it a dedicated thread instead of
    // occupying a thread-pool thread (replaces new Task(...)+Start()).
    _run = Task.Factory.StartNew(VideoCaptured, TaskCreationOptions.LongRunning);
}
/// <summary>
/// Lazily creates the recognizer and loads the id-photo (id-card chip) model.
/// Does nothing if the recognizer already exists.
/// </summary>
/// <param name="context">Context used to construct the recognizer.</param>
/// <param name="type">NOTE(review): this parameter is never used — the model is
/// hard-coded to RecognizeIdPhoto below. Confirm whether it should be removed or
/// passed through like in the generic init(Context, RecognizeType) overload.</param>
public void initWithIdCardRecognizeModel(Context context, FaceSDK.RecognizeType type)
{
    if (faceRecognize == null)
    {
        faceRecognize = new FaceRecognize(context);
        faceRecognize.InitModel(FaceSDK.RecognizeType.RecognizeIdPhoto);
    }
}
/// <summary>
/// Lazily creates the recognizer and loads the requested recognition model.
/// Subsequent calls are no-ops once the recognizer exists.
/// </summary>
/// <param name="context">Context used to construct the recognizer.</param>
/// <param name="type">Recognition model to load.</param>
public void init(Context context, FaceSDK.RecognizeType type)
{
    if (faceRecognize != null)
    {
        return;
    }
    faceRecognize = new FaceRecognize(context);
    faceRecognize.InitModel(type);
}
/// <summary>
/// Creates the SDK components (including age/gender estimation), preloads the face
/// library cache, allocates the raw frame buffer, and starts an ffmpeg capture from
/// a DirectShow USB camera plus the background render loop.
/// </summary>
private void Init()
{
    // Age and gender estimators are attached to the tracking locator.
    _age = new FaceAge(AppId, AgeKey);
    _gender = new FaceGender(AppId, GenderKey);
    _traking = LocatorFactory.GetTrackingLocator(AppId, FtKey, _age, _gender) as FaceTracking;
    _detection = LocatorFactory.GetDetectionLocator(AppId, FdKey) as FaceDetection;
    _recognize = new FaceRecognize(AppId, FrKey);
    _processor = new FaceProcessor(_traking, _recognize);

    // Preload the face library into the cache, keyed by file name without extension.
    if (Directory.Exists(FaceLibraryPath))
    {
        foreach (var file in Directory.GetFiles(FaceLibraryPath))
        {
            // Fix: the previous info.Name.Replace(info.Extension, "") removed every
            // occurrence of the extension text anywhere in the name; strip only the
            // trailing extension.
            _cache.Add(Path.GetFileNameWithoutExtension(file), File.ReadAllBytes(file));
        }
    }

    // Unmanaged BGR24 frame buffer backing the Bitmap that is rendered.
    stride = width * pixelSize;
    bufferSize = stride * height;
    _pImage = Marshal.AllocHGlobal(bufferSize);
    _image = new Bitmap(width, height, stride, PixelFormat.Format24bppRgb, _pImage);

    var ffmpeg = new FFMpegConverter();
    outputStream = new MemoryStream();

    var setting = new ConvertSettings
    {
        // The -r (frame rate) argument can be adjusted to match the camera's FPS.
        CustomOutputArgs = "-s 1920x1080",
    };
    //-s 1920x1080 -q:v 2 -b:v 64k
    //-an -r 15 -pix_fmt bgr24 -updatefirst 1
    //task = ffmpeg.ConvertLiveMedia("rtsp://*****:*****@192.168.1.64:554/h264/ch1/main/av_stream", null,
    //    outputStream, Format.raw_video, setting);
    /*
     * USB camera capture.
     * ffmpeg can capture USB cameras as shown below.
     * First list the capture devices with: ffmpeg -list_devices true -f dshow -i dummy
     * (or check the device name in Device Manager); on my machine it is "USB2.0 PC CAMERA".
     * Then set width/height to the captured resolution; pixel size normally stays the same.
     * The resolutions a device supports can be listed with:
     * ffmpeg -list_options true -f dshow -i video="USB2.0 PC CAMERA"
     */
    task = ffmpeg.ConvertLiveMedia("video=Logitech HD Webcam C270", "dshow", outputStream, Format.raw_video, setting);
    task.OutputDataReceived += DataReceived;
    task.Start();

    // The render loop runs indefinitely, so give it a dedicated thread instead of
    // occupying a thread-pool thread (replaces new Task(...)+Start()).
    _renderTask = Task.Factory.StartNew(Render, TaskCreationOptions.LongRunning);
}
/// <summary>
/// Initializes the processor with an explicitly supplied locator and recognizer.
/// </summary>
/// <param name="locator">Face locator (tracking or detection).</param>
/// <param name="recognize">Face recognizer.</param>
/// <exception cref="ArgumentNullException">Thrown when either argument is null.</exception>
public FaceProcessor(FaceLocator locator, FaceRecognize recognize)
{
    // Fix: validate arguments up front instead of failing later with a
    // NullReferenceException at _locator.IsIntialized.
    if (locator == null)
    {
        throw new ArgumentNullException(nameof(locator));
    }
    if (recognize == null)
    {
        throw new ArgumentNullException(nameof(recognize));
    }
    _locator = locator;
    _recognize = recognize;
    // Initialize lazily so callers may pass an uninitialized locator.
    // ("IsIntialized" spelling comes from the SDK property and cannot be changed here.)
    if (!_locator.IsIntialized)
    {
        _locator.Initialize();
    }
}
/// <summary>
/// Wires up the face SDK components (age/gender estimation, detection, tracking,
/// recognition) and the repository used to look up registered person faces.
/// </summary>
public FaceDetectionService()
{
    // Age and gender estimators, shared by both locators below.
    _age = new FaceAge(AppConfigurations.AppId, AppConfigurations.AgeKey);
    _gender = new FaceGender(AppConfigurations.AppId, AppConfigurations.GenderKey);

    // Still-image detection and video-frame tracking locators.
    _detection = LocatorFactory.GetDetectionLocator(AppConfigurations.AppId, AppConfigurations.FdKey, _age, _gender) as FaceDetection;
    _traking = LocatorFactory.GetTrackingLocator(AppConfigurations.AppId, AppConfigurations.FtKey, _age, _gender) as FaceTracking;

    _recognize = new FaceRecognize(AppConfigurations.AppId, AppConfigurations.FrKey);
    _processor = new FaceProcessor(_detection, _recognize);
    _personFaceRepository = new PersonFaceRepository();
}
/// <summary>
/// Lazily creates the recognizer and loads the life-photo and id-photo models.
/// Both images in a comparison must be processed with the same model.
/// </summary>
/// <param name="context">Context used to construct the recognizer.</param>
public void init(Context context)
{
    if (faceRecognize != null)
    {
        return;
    }
    faceRecognize = new FaceRecognize(context);
    // RecognizeLive: everyday photos / video frames (includes feature extraction)
    // RecognizeIdPhoto: id-card chip photos (includes feature extraction)
    // RecognizeNir: near-infrared images (includes feature extraction)
    faceRecognize.InitModel(FaceSDK.RecognizeType.RecognizeLive);
    faceRecognize.InitModel(FaceSDK.RecognizeType.RecognizeIdPhoto);
    // faceRecognize.initModel(FaceSDK.RecognizeType.RECOGNIZE_NIR);
}
/// <summary>
/// Extracts a face feature from the image using a previously obtained locate result
/// (run FaceLocateResult() first). Suitable when the image contains a single face —
/// only the first located face is used.
/// </summary>
/// <param name="bitmap">Source image.</param>
/// <param name="locate">Locate data for the faces in the image.</param>
/// <returns>The extracted feature, or null when no face is detected.</returns>
public Feature GetFaceFeature(Bitmap bitmap, LocateResult locate)
{
    Feature feature = null;
    // Detection is only used as a "has a face" gate here; the extraction below
    // works from the caller-supplied locate data, not from locateResult.
    var result = _detection.Detect(bitmap, out var locateResult);
    if (result == Stepon.FaceRecognization.Common.ErrorCode.Ok && locateResult.HasFace)
    {
        // Fix: the original wrote "using (_recognize = new FaceRecognize(...))",
        // assigning the temporary recognizer to the instance field and then disposing
        // it — leaving _recognize pointing at a disposed object after this method
        // returned. Use a method-local recognizer instead.
        using (var recognize = new FaceRecognize(AppConfigurations.AppId, AppConfigurations.FrKey))
        {
            feature = recognize.ExtractFeature(locate.OffInput, locate.Faces[0], locate.FacesOrient[0]);
        }
    }
    return feature;
}
/// <summary>
/// Creates the SDK components, preloads the face library cache, allocates the raw
/// frame buffer, and starts an ffmpeg RTSP capture plus the background render loop.
/// </summary>
private void Init()
{
    _traking = LocatorFactory.GetTrackingLocator(AppId, FtKey) as FaceTracking;
    _detection = LocatorFactory.GetDetectionLocator(AppId, FdKey) as FaceDetection;
    _recognize = new FaceRecognize(AppId, FrKey);
    _processor = new FaceProcessor(_traking, _recognize);

    // Preload the face library into the cache, keyed by file name without extension.
    if (Directory.Exists(FaceLibraryPath))
    {
        foreach (var file in Directory.GetFiles(FaceLibraryPath))
        {
            // Fix: the previous info.Name.Replace(info.Extension, "") removed every
            // occurrence of the extension text anywhere in the name; strip only the
            // trailing extension.
            _cache.Add(Path.GetFileNameWithoutExtension(file), File.ReadAllBytes(file));
        }
    }

    // 1920x1080 BGR24 frame buffer (3 bytes per pixel) backing the render Bitmap;
    // named constants replace the repeated magic numbers.
    const int frameWidth = 1920;
    const int frameHeight = 1080;
    const int bytesPerPixel = 3;
    const int frameStride = frameWidth * bytesPerPixel;
    _pImage = Marshal.AllocHGlobal(frameStride * frameHeight);
    _image = new Bitmap(frameWidth, frameHeight, frameStride, PixelFormat.Format24bppRgb, _pImage);

    var ffmpeg = new FFMpegConverter();
    outputStream = new MemoryStream();

    var setting = new ConvertSettings
    {
        // The -r (frame rate) argument can be adjusted to match the camera's FPS.
        CustomOutputArgs = "-an -r 15 -pix_fmt bgr24 -updatefirst 1"
    };
    //-s 1920x1080 -q:v 2 -b:v 64k
    task = ffmpeg.ConvertLiveMedia("rtsp://*****:*****@192.168.1.64:554/h264/ch1/main/av_stream", null,
        outputStream, Format.raw_video, setting);
    task.OutputDataReceived += DataReceived;
    task.Start();

    // The render loop runs indefinitely, so give it a dedicated thread instead of
    // occupying a thread-pool thread (replaces new Task(...)+Start()).
    _renderTask = Task.Factory.StartNew(Render, TaskCreationOptions.LongRunning);
}