/// <summary>
/// <para>Executes stabilization process.</para>
/// <para>In:</para>
/// <para> - each frame result by frame_res(C_FRAME_RESULT) input argument</para>
/// <para>Out:</para>
/// <para> - face count (return)</para>
/// <para> - body count (return)</para>
/// <para> - stabilized face result by faces_res(C_FACE_RESULTS) output argument</para>
/// <para> - stabilized body result by bodies_res(C_BODY_RESULTS) output argument</para>
/// </summary>
/// <param name="frameresult">input one frame result for STBLib.
/// Set the information of face central coordinate, size and
/// direction to stabilize age, gender and face recognition.</param>
/// <param name="faces_res">output result stabilized face data.</param>
/// <param name="bodies_res">output result stabilized body data.</param>
/// <returns>struct of (retcode, face_count, body_count)
/// retcode (int): return value of STB library
/// face_count (int): stabilized face count
/// body_count (int): stabilized body count
/// </returns>
public STB_EXECUTE_RET execute(STBLibWrapper.STB_FRAME_RESULT frameresult,
                               STBLibWrapper.STB_FACE[] faces_res,
                               STBLibWrapper.STB_BODY[] bodies_res)
{
    STB_EXECUTE_RET result;
    result.face_count = 0;
    result.body_count = 0;

    // Feed the new frame into the stabilizer.
    result.retcode = STBLibWrapper.STBLib.STB_SetFrameResult(this.hstb, ref frameresult);
    if (result.retcode != STB_RET_NORMAL)
    {
        return result;
    }

    // Run the stabilization pass itself.
    result.retcode = STBLibWrapper.STBLib.STB_Execute(this.hstb);
    if (result.retcode != STB_RET_NORMAL)
    {
        return result;
    }

    // Fetch stabilized faces; on failure report zero counts.
    result.retcode = STBLibWrapper.STBLib.STB_GetFaces(this.hstb, out result.face_count, faces_res);
    if (result.retcode != STB_RET_NORMAL)
    {
        result.face_count = 0;
        result.body_count = 0;
        return result;
    }

    // Fetch stabilized bodies; on failure report zero counts (faces included,
    // matching the all-or-nothing contract of this wrapper).
    result.retcode = STBLibWrapper.STBLib.STB_GetBodies(this.hstb, out result.body_count, bodies_res);
    if (result.retcode != STB_RET_NORMAL)
    {
        result.face_count = 0;
        result.body_count = 0;
        return result;
    }

    return result;
}
/// <summary>
/// <para>Executes functions specified in the constructor.</para>
/// <para>e.g. Face detection, Age estimation etc.</para>
/// </summary>
/// <param name="out_img_type">output image type
/// <para> OUT_IMG_TYPE_NONE (00h): no image output</para>
/// <para> OUT_IMG_TYPE_QVGA (01h): 320x240 pixel resolution(QVGA)</para>
/// <para> OUT_IMG_TYPE_QQVGA (02h): 160x120 pixel resolution(QQVGA)</para></param>
/// <param name="tracking_result">the tracking result is stored</param>
/// <param name="out_img">output image</param>
/// <returns>struct of (response_code, stb_return)
/// <para> response_code (int): response code from B5T-007001</para>
/// <para> stb_return (int): return status of STB library</para>
/// </returns>
public EXECUTE_RET execute(int out_img_type, HVCTrackingResult tracking_result, GrayscaleImage out_img)
{
    EXECUTE_RET retvalue;
    retvalue.response_code = 0;
    retvalue.stb_return = 0;

    // Run the configured functions on the device and collect the raw frame result.
    var frame_result = new HVCResult();
    retvalue.response_code = this._hvc_p2_wrapper.execute(this._exec_func, out_img_type, frame_result, out_img);
    tracking_result.clear();

    if (this.use_stb == false || this._exec_func == p2def.EX_NONE)
    {
        // Stabilization disabled (or nothing executed): pass the raw result through.
        // NOTE(review): "appned" spelling comes from HVCTrackingResult's API.
        tracking_result.appned_FRAME_RESULT(frame_result);
        return retvalue;
    }

    // Marshal the frame result into the STB library's C layout.
    var stb_in = new STBLibWrapper.STB_FRAME_RESULT();
    stb_in.bodys.body = new STBLibWrapper.STB_FRAME_RESULT_DETECTION[STB.STB_FRAME_NUM];
    stb_in.faces.face = new STBLibWrapper.STB_FRAME_RESULT_FACE[STB.STB_FRAME_NUM];
    frame_result.export_to_C_FRAME_RESULT(ref stb_in);

    var stabilized_faces = new STBLibWrapper.STB_FACE[STB.STB_FRAME_NUM];
    var stabilized_bodies = new STBLibWrapper.STB_BODY[STB.STB_FRAME_NUM];
    STB.STB_EXECUTE_RET stb_ret = this._stb.execute(stb_in, stabilized_faces, stabilized_bodies);
    if (stb_ret.retcode < 0)
    {
        // STB error: surface the library code; tracking_result stays cleared.
        retvalue.stb_return = stb_ret.retcode;
        return retvalue;
    }

    // Stabilized faces, plus the per-frame attributes STB does not smooth.
    tracking_result.faces.append_C_FACE_RES35(this._exec_func, (int)stb_ret.face_count, stabilized_faces);
    if ((this._exec_func & p2def.EX_DIRECTION) != 0)
    {
        tracking_result.faces.append_direction_list(frame_result.faces);
    }
    if ((this._exec_func & p2def.EX_GAZE) != 0)
    {
        tracking_result.faces.append_gaze_list(frame_result.faces);
    }
    if ((this._exec_func & p2def.EX_BLINK) != 0)
    {
        tracking_result.faces.append_blink_list(frame_result.faces);
    }
    if ((this._exec_func & p2def.EX_EXPRESSION) != 0)
    {
        tracking_result.faces.append_expression_list(frame_result.faces);
    }
    tracking_result.bodies.append_BODY_RES35(this._exec_func, (int)stb_ret.body_count, stabilized_bodies);
    tracking_result.hands.append_hand_list(frame_result.hands);
    return retvalue;
}
/// <summary>
/// P/Invoke binding: passes one frame's detection result to the native STB
/// library for the stabilization handle <c>hstb</c>.
/// Returns the native STB status code (0 = normal, per the wrapper's
/// STB_RET_NORMAL checks elsewhere in this file).
/// </summary>
public static extern int STB_SetFrameResult(IntPtr hstb, ref STB_FRAME_RESULT frameresult);
/// <summary>
/// Copies this frame's detection results into the STB library's C-layout
/// frame-result structure (C_FRAME_RESULT). Optional per-face attributes
/// (direction, age, gender, gaze, blink, expression, recognition) are
/// copied only when present (non-null).
/// </summary>
/// <param name="frame_result">destination structure; its <c>bodys.body</c> and
/// <c>faces.face</c> arrays must already be allocated by the caller.</param>
public void export_to_C_FRAME_RESULT(ref STBLibWrapper.STB_FRAME_RESULT frame_result)
{
    // Hoist the counts out of the loop conditions and clamp them to the
    // destination capacity so an oversized detection list can neither index
    // past the caller-allocated arrays nor report an nCount larger than the
    // buffer handed to the native library.
    int body_count = Math.Min(this.bodies.Count(), frame_result.bodys.body.Length);
    int face_count = Math.Min(this.faces.Count(), frame_result.faces.face.Length);

    // Human body detection result
    frame_result.bodys.nCount = body_count;
    for (var i = 0; i < body_count; i++)
    {
        frame_result.bodys.body[i].center.nX = this.bodies[i].pos_x;
        frame_result.bodys.body[i].center.nY = this.bodies[i].pos_y;
        frame_result.bodys.body[i].nSize = this.bodies[i].size;
        frame_result.bodys.body[i].nConfidence = this.bodies[i].conf;
    }

    // Face detection result
    frame_result.faces.nCount = face_count;
    for (var i = 0; i < face_count; i++)
    {
        var src = this.faces[i];

        frame_result.faces.face[i].center.nX = src.pos_x;
        frame_result.faces.face[i].center.nY = src.pos_y;
        frame_result.faces.face[i].nSize = src.size;
        frame_result.faces.face[i].nConfidence = src.conf;

        // Face direction result
        if (src.direction != null)
        {
            frame_result.faces.face[i].direction.nLR = src.direction.LR;
            frame_result.faces.face[i].direction.nUD = src.direction.UD;
            frame_result.faces.face[i].direction.nRoll = src.direction.roll;
            frame_result.faces.face[i].direction.nConfidence = src.direction.conf;
        }
        // Age estimation result
        if (src.age != null)
        {
            frame_result.faces.face[i].age.nAge = src.age.age;
            frame_result.faces.face[i].age.nConfidence = src.age.conf;
        }
        // Gender estimation result
        if (src.gender != null)
        {
            frame_result.faces.face[i].gender.nGender = src.gender.gender;
            frame_result.faces.face[i].gender.nConfidence = src.gender.conf;
        }
        // Gaze estimation result
        if (src.gaze != null)
        {
            frame_result.faces.face[i].gaze.nLR = src.gaze.gazeLR;
            frame_result.faces.face[i].gaze.nUD = src.gaze.gazeUD;
        }
        // Blink estimation result
        if (src.blink != null)
        {
            frame_result.faces.face[i].blink.nLeftEye = src.blink.ratioL;
            frame_result.faces.face[i].blink.nRightEye = src.blink.ratioR;
        }
        // Expression estimation result
        if (src.expression != null)
        {
            frame_result.faces.face[i].expression.anScore =
                new int[(int)STBLibWrapper.STB_OKAO_EXPRESSION.STB_Expression_Max];
            frame_result.faces.face[i].expression.anScore[(int)STBLibWrapper.STB_OKAO_EXPRESSION.STB_Expression_Neutral] = src.expression.neutral;
            frame_result.faces.face[i].expression.anScore[(int)STBLibWrapper.STB_OKAO_EXPRESSION.STB_Expression_Happiness] = src.expression.happiness;
            frame_result.faces.face[i].expression.anScore[(int)STBLibWrapper.STB_OKAO_EXPRESSION.STB_Expression_Surprise] = src.expression.surprise;
            frame_result.faces.face[i].expression.anScore[(int)STBLibWrapper.STB_OKAO_EXPRESSION.STB_Expression_Anger] = src.expression.anger;
            frame_result.faces.face[i].expression.anScore[(int)STBLibWrapper.STB_OKAO_EXPRESSION.STB_Expression_Sadness] = src.expression.sadness;
        }
        // Recognition result
        if (src.recognition != null)
        {
            frame_result.faces.face[i].recognition.uID = src.recognition.uid;
            frame_result.faces.face[i].recognition.nScore = src.recognition.score;
        }
    }
}