/// <summary>
/// Constructor<br>
/// [Description]<br>
/// none<br>
/// [Notes]<br>
/// </summary>
public DetectionResult(HVC_RES parent)
{
    this.Parent = parent;
    PosX = -1;
    PosY = -1;
    Size = -1;
    Confidence = -1;
}
/// <summary>
/// Execute HVC functions<br>
/// [Description]<br>
/// Execute each HVC function. Store results in HVC_BLE.Result<br>
/// </summary>
/// <param name="inExec"> execution flag<br> </param>
/// <param name="res"> storage for HVC execution results<br> </param>
/// <returns> int execution result error code <br> </returns>
public async override Task<int> Execute(int inExec, HVC_RES res)
{
    if (BtDevice == null)
    {
        Debug.WriteLine(TAG, "execute() : HVC_ERROR_NODEVICES");
        return (HVC_ERROR_NODEVICES);
    }
    if (Service == null || Service.GetmConnectionState() != BleDeviceService.STATE_CONNECTED)
    {
        Debug.WriteLine(TAG, "execute() : HVC_ERROR_DISCONNECTED");
        return (HVC_ERROR_DISCONNECTED);
    }
    if (Status > STATE_CONNECTED)
    {
        Debug.WriteLine(TAG, "execute() : HVC_ERROR_BUSY");
        return (HVC_ERROR_BUSY);
    }

    Status = STATE_BUSY;

    await Task.Run(() =>
    {
        int nRet = HVC_NORMAL;
        byte[] outStatus = new byte[1];
        nRet = Execute(30000, inExec, outStatus, res);
        if (Callback != null)
        {
            Callback.OnPostExecute(nRet, outStatus[0]);
        }
        if (Status == STATE_BUSY)
        {
            Status = STATE_CONNECTED;
        }
    });

    Debug.WriteLine(TAG, "execute() : HVC_NORMAL");
    return (HVC_NORMAL);
}
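// --- Usage sketch (not part of the SDK source) ---
// A minimal caller for the asynchronous Execute above. It assumes an HVC_BLE instance
// that has already been connected, the usual using System.Threading.Tasks /
// System.Diagnostics directives, and that the HVC_ACTIV_* and HVC_NORMAL constants
// are reachable through the HVC base class as in the code above. DetectOnceAsync is
// a hypothetical helper name.
public static async Task DetectOnceAsync(HVC_BLE hvcBle)
{
    var res = new HVC_RES();
    int exec = HVC.HVC_ACTIV_FACE_DETECTION
             | HVC.HVC_ACTIV_AGE_ESTIMATION
             | HVC.HVC_ACTIV_GENDER_ESTIMATION;

    // Fills res.Body / res.Hand / res.Face; failures come back as the error codes checked above.
    int ret = await hvcBle.Execute(exec, res);
    if (ret != HVC.HVC_NORMAL)
    {
        Debug.WriteLine($"Execute failed: {ret}");
        return;
    }

    foreach (var face in res.Face)
    {
        // Age and Gen are populated only for the HVC_ACTIV_* bits requested in exec.
        Debug.WriteLine($"face ({face.PosX},{face.PosY}) size={face.Size} age={face.Age.Age}");
    }
}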
/// <summary>
/// Constructor<br>
/// [Description]<br>
/// none<br>
/// [Notes]<br>
/// </summary>
public FaceResult(HVC_RES parent) : base(parent)
{
    this.Parent = parent;
    Dir = new DirResult(this);
    Age = new AgeResult(this);
    Gen = new GenResult(this);
    Gaze = new GazeResult(this);
    Blink = new BlinkResult(this);
    Exp = new ExpResult(this);
}
/// <summary>
/// Execute<br>
/// [Description]<br>
/// none<br>
/// [Notes]<br>
/// </summary>
/// <param name="inTimeOutTime"> timeout time<br> </param>
/// <param name="inExec"> executable function<br> </param>
/// <param name="outStatus"> response code<br> </param>
/// <param name="result"> storage for detection results<br> </param>
public virtual int Execute(int inTimeOutTime, int inExec, byte[] outStatus, HVC_RES result)
{
    int nRet = 0;
    int[] nSize = { 0 };
    byte[] sendData;
    byte[] recvData;
    sendData = new byte[32];
    recvData = new byte[32];

    result.ExecutedFunc = inExec;
    result.Body.Clear();
    result.Hand.Clear();
    result.Face.Clear();

    /* Send Execute command signal */
    sendData[0] = unchecked((byte)(inExec & 0xff));
    sendData[1] = unchecked((byte)((inExec >> 8) & 0xff));
    sendData[2] = 0;
    nRet = SendCommand(HVC_COM_EXECUTE, 3, sendData);
    if (nRet != 0)
    {
        return (nRet);
    }

    /* Receive header */
    nRet = ReceiveHeader(inTimeOutTime, nSize, outStatus);
    if (nRet != 0)
    {
        return (nRet);
    }

    int numBody = 0;
    int numHand = 0;
    int numFace = 0;

    /* Receive result data */
    if (nSize[0] >= 4)
    {
        nRet = ReceiveData(inTimeOutTime, 4, recvData);
        numBody = (recvData[0] & 0xff);
        numHand = (recvData[1] & 0xff);
        numFace = (recvData[2] & 0xff);
        if (nRet != 0)
        {
            return (nRet);
        }
        nSize[0] -= 4;
    }

    /* Get Human Body Detection result */
    for (int i = 0; i < numBody; i++)
    {
        var body = new omron.HVC.HVC_RES.DetectionResult((new HVC_RES()));
        if (nSize[0] >= 8)
        {
            nRet = ReceiveData(inTimeOutTime, 8, recvData);
            body.PosX = ((recvData[0] & 0xff) + (recvData[1] << 8));
            body.PosY = ((recvData[2] & 0xff) + (recvData[3] << 8));
            body.Size = ((recvData[4] & 0xff) + (recvData[5] << 8));
            body.Confidence = ((recvData[6] & 0xff) + (recvData[7] << 8));
            if (nRet != 0)
            {
                return (nRet);
            }
            nSize[0] -= 8;
        }
        result.Body.Add(body);
    }

    /* Get Hand Detection result */
    for (int i = 0; i < numHand; i++)
    {
        var hand = new omron.HVC.HVC_RES.DetectionResult((new HVC_RES()));
        if (nSize[0] >= 8)
        {
            nRet = ReceiveData(inTimeOutTime, 8, recvData);
            hand.PosX = ((recvData[0] & 0xff) + (recvData[1] << 8));
            hand.PosY = ((recvData[2] & 0xff) + (recvData[3] << 8));
            hand.Size = ((recvData[4] & 0xff) + (recvData[5] << 8));
            hand.Confidence = ((recvData[6] & 0xff) + (recvData[7] << 8));
            if (nRet != 0)
            {
                return (nRet);
            }
            nSize[0] -= 8;
        }
        result.Hand.Add(hand);
    }

    /* Face-related results */
    for (int i = 0; i < numFace; i++)
    {
        var face = new omron.HVC.HVC_RES.FaceResult((new HVC_RES()));

        /* Face Detection result */
        if (0 != (result.ExecutedFunc & HVC_ACTIV_FACE_DETECTION))
        {
            if (nSize[0] >= 8)
            {
                nRet = ReceiveData(inTimeOutTime, 8, recvData);
                face.PosX = ((recvData[0] & 0xff) + (recvData[1] << 8));
                face.PosY = ((recvData[2] & 0xff) + (recvData[3] << 8));
                face.Size = ((recvData[4] & 0xff) + (recvData[5] << 8));
                face.Confidence = ((recvData[6] & 0xff) + (recvData[7] << 8));
                if (nRet != 0)
                {
                    return (nRet);
                }
                nSize[0] -= 8;
            }
        }

        /* Face direction */
        if (0 != (result.ExecutedFunc & HVC_ACTIV_FACE_DIRECTION))
        {
            if (nSize[0] >= 8)
            {
                nRet = ReceiveData(inTimeOutTime, 8, recvData);
                face.Dir.Yaw = (short)((recvData[0] & 0xff) + (recvData[1] << 8));
                face.Dir.Pitch = (short)((recvData[2] & 0xff) + (recvData[3] << 8));
                face.Dir.Roll = (short)((recvData[4] & 0xff) + (recvData[5] << 8));
                face.Dir.Confidence = (short)((recvData[6] & 0xff) + (recvData[7] << 8));
                if (nRet != 0)
                {
                    return (nRet);
                }
                nSize[0] -= 8;
            }
        }

        /* Age */
        if (0 != (result.ExecutedFunc & HVC_ACTIV_AGE_ESTIMATION))
        {
            if (nSize[0] >= 3)
            {
                nRet = ReceiveData(inTimeOutTime, 3, recvData);
                face.Age.Age = recvData[0];
                face.Age.Confidence = (short)((recvData[1] & 0xff) + (recvData[2] << 8));
                if (nRet != 0)
                {
                    return (nRet);
                }
                nSize[0] -= 3;
            }
        }

        /* Gender */
        if (0 != (result.ExecutedFunc & HVC_ACTIV_GENDER_ESTIMATION))
        {
            if (nSize[0] >= 3)
            {
                nRet = ReceiveData(inTimeOutTime, 3, recvData);
                face.Gen.Gender = recvData[0];
                face.Gen.Confidence = (short)((recvData[1] & 0xff) + (recvData[2] << 8));
                if (nRet != 0)
                {
                    return (nRet);
                }
                nSize[0] -= 3;
            }
        }

        /* Gaze */
        if (0 != (result.ExecutedFunc & HVC_ACTIV_GAZE_ESTIMATION))
        {
            if (nSize[0] >= 2)
            {
                nRet = ReceiveData(inTimeOutTime, 2, recvData);
                face.Gaze.GazeLR = recvData[0];
                face.Gaze.GazeUD = recvData[1];
                if (nRet != 0)
                {
                    return (nRet);
                }
                nSize[0] -= 2;
            }
        }

        /* Blink */
        if (0 != (result.ExecutedFunc & HVC_ACTIV_BLINK_ESTIMATION))
        {
            if (nSize[0] >= 4)
            {
                nRet = ReceiveData(inTimeOutTime, 4, recvData);
                face.Blink.RatioL = (short)((recvData[0] & 0xff) + (recvData[1] << 8));
                face.Blink.RatioR = (short)((recvData[2] & 0xff) + (recvData[3] << 8));
                if (nRet != 0)
                {
                    return (nRet);
                }
                nSize[0] -= 4;
            }
        }

        /* Expression */
        if (0 != (result.ExecutedFunc & HVC_ACTIV_EXPRESSION_ESTIMATION))
        {
            if (nSize[0] >= 3)
            {
                nRet = ReceiveData(inTimeOutTime, 3, recvData);
                face.Exp.Expression = recvData[0];
                face.Exp.Score = recvData[1];
                face.Exp.Degree = recvData[2];
                if (nRet != 0)
                {
                    return (nRet);
                }
                nSize[0] -= 3;
            }
        }

        result.Face.Add(face);
    }

    return (HVC_NORMAL);
}
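// --- Decoding note (illustrative, not in the SDK) ---
// Every multi-byte field above arrives little-endian: (recvData[n] & 0xff) + (recvData[n + 1] << 8)
// rebuilds a 16-bit value from the low byte and the high byte, and the (short) cast restores the
// sign for fields that can be negative, such as Yaw, Pitch and Roll. Hypothetical helpers that
// express the same idiom:
static int ReadUInt16LE(byte[] buffer, int offset)
{
    // low byte first, high byte shifted into bits 8..15
    return (buffer[offset] & 0xff) + (buffer[offset + 1] << 8);
}

static short ReadInt16LE(byte[] buffer, int offset)
{
    // same byte layout, reinterpreted as a signed 16-bit value
    return (short)((buffer[offset] & 0xff) + (buffer[offset + 1] << 8));
}

// e.g. body.PosX = ReadUInt16LE(recvData, 0);  face.Dir.Yaw = ReadInt16LE(recvData, 0);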
/// <summary>
/// Execute HVC functions<br>
/// [Description]<br>
/// Implemented by each transport subclass (see HVC_BLE.Execute above)<br>
/// </summary>
/// <param name="inExec"> execution flag<br> </param>
/// <param name="res"> storage for HVC execution results<br> </param>
/// <returns> int execution result error code <br> </returns>
public abstract Task<int> Execute(int inExec, HVC_RES res);
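// --- Implementation sketch (hypothetical, not in the SDK) ---
// Each transport subclass overrides this member; HVC_BLE above is the shipped example.
// The sketch below shows the same shape for an imagined transport, assuming HVC is the
// abstract base class and delegating to the synchronous, protocol-level
// Execute(inTimeOutTime, inExec, outStatus, result) shown earlier. The low-level I/O
// members (SendCommand, ReceiveHeader, ReceiveData) that a real subclass must also
// provide are omitted here.
public class HVC_SERIAL_SKETCH : HVC
{
    public override async Task<int> Execute(int inExec, HVC_RES res)
    {
        return await Task.Run(() =>
        {
            byte[] outStatus = new byte[1];
            // 30000 ms mirrors the timeout HVC_BLE uses above; tune per transport.
            return Execute(30000, inExec, outStatus, res);
        });
    }
}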