/// <summary>
/// Executes the functions selected by <paramref name="exec_func"/>
/// (e.g. face detection, age estimation) on the device, then parses the
/// returned frame result and, optionally, the captured grayscale image.
/// </summary>
/// <param name="exec_func">Bit flags selecting the functions to execute (p2def.EX_*).</param>
/// <param name="out_img_type">Output image type (0x00 = no image; otherwise image data follows the result).</param>
/// <param name="frame_result">Receives the parsed detection/estimation results.</param>
/// <param name="img">Receives the output image when <paramref name="out_img_type"/> != 0x00.</param>
/// <returns>Response code from the device (0x00 on success).</returns>
public int execute(int exec_func, int out_img_type, HVCResult frame_result, GrayscaleImage img)
{
    var cmd = new List<byte>(HVC_CMD_HDR_EXECUTE);

    // The facial estimation functions require the face detection (and face
    // direction) stages to run, so force those flags on whenever any of
    // them is requested.
    if ((exec_func & (p2def.EX_DIRECTION | p2def.EX_AGE | p2def.EX_GENDER
                    | p2def.EX_GAZE | p2def.EX_BLINK | p2def.EX_EXPRESSION)) != 0x00)
    {
        // Bitwise OR, not arithmetic '+': these are bit flags, and OR stays
        // correct even if one of the bits is already set.
        exec_func |= p2def.EX_FACE | p2def.EX_DIRECTION;
    }

    cmd.AddRange(BitConverter.GetBytes((UInt16)exec_func));
    cmd.Add((byte)out_img_type);

    // Materialize the command once instead of calling ToArray() twice.
    var payload = cmd.ToArray();
    var sendResult = this._send_command(payload, payload.Length);
    if (sendResult.response_code == 0x00)
    {
        // Success: parse the results; rc is the offset where image data begins.
        int rc = frame_result.read_from_buffer(exec_func, sendResult.data_len, sendResult.data);
        if (out_img_type != 0x00)
        {
            // Image header: 2-byte width, 2-byte height (little-endian per
            // BitConverter on this platform), then raw grayscale pixels.
            img.width = BitConverter.ToInt16(sendResult.data, rc);
            img.height = BitConverter.ToInt16(sendResult.data, rc + 2);
            img.data = sendResult.data.Skip(rc + 4).Take(img.width * img.height).ToArray();
        }
    }
    return sendResult.response_code;
}
/// <summary>
/// Copies a raw HVCResult frame (bodies, hands, and faces together with any
/// optional estimation results that are present) into this tracking result,
/// marking every entry as not tracked by the STB library.
/// </summary>
/// <param name="frame_result">Raw frame result read from the device.</param>
public void appned_FRAME_RESULT(HVCResult frame_result)
{
    // NOTE(review): the method name keeps the historical "appned" spelling;
    // external callers depend on it, so it is not renamed here.

    // Body detection results.
    var bodyIndex = 0;
    foreach (var body in frame_result.bodies)
    {
        this.bodies.Add(new TrackingResult(body.pos_x, body.pos_y, body.size, body.conf,
            bodyIndex++, hvc_tracking_result.STB_TRID_NOT_TRACKED));
    }

    // Hand detection results.
    var handIndex = 0;
    foreach (var hand in frame_result.hands)
    {
        this.hands.Add(new TrackingResult(hand.pos_x, hand.pos_y, hand.size, hand.conf,
            handIndex++, hvc_tracking_result.STB_TRID_NOT_TRACKED));
    }

    // Face detection results, plus each optional estimation block when present.
    var faceIndex = 0;
    foreach (var face in frame_result.faces)
    {
        var face_res = new TrackingFaceResult(face.pos_x, face.pos_y, face.size, face.conf,
            faceIndex++, hvc_tracking_result.STB_TRID_NOT_TRACKED);

        // Face direction estimation.
        if (face.direction != null)
        {
            face_res.direction = new DirectionResult(face.direction.LR, face.direction.UD,
                face.direction.roll, face.direction.conf);
        }
        // Age estimation.
        if (face.age != null)
        {
            face_res.age = new AgeResult(face.age.age, face.age.conf);
        }
        // Gender estimation.
        if (face.gender != null)
        {
            face_res.gender = new GenderResult(face.gender.gender, face.gender.conf);
        }
        // Gaze estimation.
        if (face.gaze != null)
        {
            face_res.gaze = new GazeResult(face.gaze.gazeLR, face.gaze.gazeUD);
        }
        // Blink estimation.
        if (face.blink != null)
        {
            face_res.blink = new BlinkResult(face.blink.ratioR, face.blink.ratioL);
        }
        // Expression estimation.
        if (face.expression != null)
        {
            face_res.expression = new ExpressionResult(face.expression.neutral,
                face.expression.happiness, face.expression.surprise, face.expression.anger,
                face.expression.sadness, face.expression.neg_pos);
        }
        // Face recognition.
        if (face.recognition != null)
        {
            face_res.recognition = new RecognitionResult(face.recognition.uid,
                face.recognition.score);
        }

        this.faces.Add(face_res);
    }
}
/// <summary>
/// <para>Executes the functions specified in the constructor.</para>
/// <para>e.g. Face detection, Age estimation etc.</para>
/// </summary>
/// <param name="out_img_type">output image type
/// <para>  OUT_IMG_TYPE_NONE  (00h): no image output</para>
/// <para>  OUT_IMG_TYPE_QVGA  (01h): 320x240 pixel resolution (QVGA)</para>
/// <para>  OUT_IMG_TYPE_QQVGA (02h): 160x120 pixel resolution (QQVGA)</para></param>
/// <param name="tracking_result">receives the tracking result</param>
/// <param name="out_img">receives the output image</param>
/// <returns>struct of (response_code, stb_return)
/// <para>  response_code (int): response code from B5T-007001</para>
/// <para>  stb_return (int): return status of the STB library (0 when STB was not run or succeeded)</para>
/// </returns>
public EXECUTE_RET execute(int out_img_type, HVCTrackingResult tracking_result, GrayscaleImage out_img)
{
    EXECUTE_RET result;
    result.stb_return = 0;

    // Run the selected functions on the device and read the raw frame result.
    var frame = new HVCResult();
    result.response_code = this._hvc_p2_wrapper.execute(this._exec_func, out_img_type, frame, out_img);
    tracking_result.clear();

    if (!this.use_stb || this._exec_func == p2def.EX_NONE)
    {
        // STB disabled (or nothing executed): copy the raw frame result as-is.
        tracking_result.appned_FRAME_RESULT(frame);
        return result;
    }

    // Convert the frame result into the STB library's C layout and run
    // the stabilization/tracking pass.
    var stb_in = new STBLibWrapper.STB_FRAME_RESULT();
    stb_in.bodys.body = new STBLibWrapper.STB_FRAME_RESULT_DETECTION[STB.STB_FRAME_NUM];
    stb_in.faces.face = new STBLibWrapper.STB_FRAME_RESULT_FACE[STB.STB_FRAME_NUM];
    frame.export_to_C_FRAME_RESULT(ref stb_in);

    var stb_out_f = new STBLibWrapper.STB_FACE[STB.STB_FRAME_NUM];
    var stb_out_b = new STBLibWrapper.STB_BODY[STB.STB_FRAME_NUM];
    STB.STB_EXECUTE_RET stb_ret = this._stb.execute(stb_in, stb_out_f, stb_out_b);
    if (stb_ret.retcode < 0)
    {
        // STB library reported an error; surface its return code.
        result.stb_return = stb_ret.retcode;
        return result;
    }

    // Stabilized face results, then merge back the per-face estimations
    // that STB does not track itself.
    tracking_result.faces.append_C_FACE_RES35(this._exec_func, (int)stb_ret.face_count, stb_out_f);
    if ((this._exec_func & p2def.EX_DIRECTION) != 0)
    {
        tracking_result.faces.append_direction_list(frame.faces);
    }
    if ((this._exec_func & p2def.EX_GAZE) != 0)
    {
        tracking_result.faces.append_gaze_list(frame.faces);
    }
    if ((this._exec_func & p2def.EX_BLINK) != 0)
    {
        tracking_result.faces.append_blink_list(frame.faces);
    }
    if ((this._exec_func & p2def.EX_EXPRESSION) != 0)
    {
        tracking_result.faces.append_expression_list(frame.faces);
    }

    // Stabilized body results and raw hand results (hands are not stabilized).
    tracking_result.bodies.append_BODY_RES35(this._exec_func, (int)stb_ret.body_count, stb_out_b);
    tracking_result.hands.append_hand_list(frame.hands);

    return result;
}