/// <summary>
/// An action parameter bound via ARDataBind is flattened into the parameter
/// dictionary under prefixed keys ("db.PK", "db.Name"); members that were
/// never set come back as null.
/// </summary>
public void GetActionParameters_WithDataBind()
{
    var parameters = new ARUtils().GetActionParameters(
        (TestController c) => c.MethodWithDataBind(new TestController.Entity { PK = 1 }));

    Assert.AreEqual(1, parameters["db.PK"]);
    Assert.AreEqual(null, parameters["db.Name"]);
}
/// <summary>
/// GetPrimaryKey should resolve the member that backs the PK property of a
/// class that declares a primary key. The expected member is obtained from an
/// expression tree so the comparison survives renames.
/// </summary>
public void GetPrimaryKey()
{
    Expression<Func<ClassWithPrimaryKey, int>> pkExpression = c => c.PK;
    var expectedMember = ((MemberExpression)pkExpression.Body).Member;

    var primaryKey = new ARUtils().GetPrimaryKey(typeof(ClassWithPrimaryKey));

    Assert.IsNotNull(primaryKey);
    Assert.AreEqual(expectedMember, primaryKey);
}
/// <summary>
/// An entity passed to an ARFetch-decorated parameter is represented in the
/// dictionary by its primary-key value, keyed by the parameter name.
/// </summary>
public void GetActionParameters_WithARFetch()
{
    var parameters = new ARUtils().GetActionParameters(
        (TestController c) => c.MethodWithARFetch(new TestController.Entity { PK = 1 }));

    Assert.AreEqual(1, parameters["bla"]);
}
/// <summary>
/// Passing null to an ARFetch-decorated parameter must not throw; the
/// dictionary entry for that parameter is simply null.
/// </summary>
public void GetActionParameters_WithARFetch_nullParameter()
{
    var parameters = new ARUtils().GetActionParameters(
        (TestController c) => c.MethodWithARFetch(null));

    Assert.IsNull(parameters["bla"]);
}
/// <summary>
/// A plain (undecorated) parameter is passed straight through into the
/// dictionary under its own name.
/// </summary>
public void GetActionParameters_plain()
{
    var parameters = new ARUtils().GetActionParameters(
        (TestController c) => c.MethodWithoutARDataBind("one"));

    Assert.AreEqual("one", parameters["bla"]);
}
/// <summary>
/// A type without a declared primary key yields null rather than throwing.
/// </summary>
public void GetPrimaryKey_with_no_primarykey_should_return_null()
{
    var primaryKey = new ARUtils().GetPrimaryKey(typeof(ClassWithoutPrimaryKey));

    Assert.IsNull(primaryKey);
}
/// <summary>
/// A primary key declared on a base class is still found for a derived type.
/// Only the member names are compared because the MemberInfo resolved through
/// the derived type need not be reference-equal to the base declaration.
/// </summary>
public void GetPrimaryKey_inherited()
{
    Expression<Func<InheritedFromClassWithPrimaryKey, int>> pkExpression = c => c.PK;
    var expectedName = ((MemberExpression)pkExpression.Body).Member.Name;

    var primaryKey = new ARUtils().GetPrimaryKey(typeof(InheritedFromClassWithPrimaryKey));

    Assert.IsNotNull(primaryKey);
    Assert.AreEqual(expectedName, primaryKey.Name);
}
/// <summary>
/// Per-frame update: grabs the current camera frame, reads the detected face
/// landmarks, estimates the head pose with solvePnP, and converts it to a
/// Unity-space position/rotation (headPosition / headRotation).
/// Sets didUpdateHeadPositionAndRotation to true only when a valid,
/// in-viewport pose was produced this frame.
/// </summary>
public override void UpdateValue()
{
    // Bail out until both source interfaces have been wired up.
    if (matSourceGetterInterface == null) { return; }
    if (faceLandmarkGetterInterface == null) { return; }

    Mat rgbaMat = matSourceGetterInterface.GetMatSource();
    if (rgbaMat == null)
    {
        return;
    }
    else
    {
        // Frame size changed (e.g. camera rotated): rebuild the intrinsic matrix.
        if (rgbaMat.width() != imageWidth || rgbaMat.height() != imageHeight)
        {
            imageWidth = rgbaMat.width();
            imageHeight = rgbaMat.height();
            SetCameraMatrix(camMatrix, imageWidth, imageHeight);
        }
    }

    // Assume failure until a pose is successfully extracted below.
    didUpdateHeadPositionAndRotation = false;

    List<Vector2> points = faceLandmarkGetterInterface.GetFaceLanmarkPoints();
    if (points != null)
    {
        // Pick the 3D reference model matching the landmark scheme, and fill
        // imagePoints with the corresponding 2D measurements:
        // eye centers (interpupillary breadth), nose tip, subnasale, and —
        // for the 68/17 schemes — the ear points (bitragion breadth).
        MatOfPoint3f objectPoints = null;

        if (points.Count == 68)
        {
            // dlib-style 68-point landmark scheme.
            objectPoints = objectPoints68;
            imagePoints.fromArray(
                new Point((points[38].x + points[41].x) / 2, (points[38].y + points[41].y) / 2), //l eye (Interpupillary breadth)
                new Point((points[43].x + points[46].x) / 2, (points[43].y + points[46].y) / 2), //r eye (Interpupillary breadth)
                new Point(points[30].x, points[30].y), //nose (Tip)
                new Point(points[33].x, points[33].y), //nose (Subnasale)
                new Point(points[0].x, points[0].y), //l ear (Bitragion breadth)
                new Point(points[16].x, points[16].y) //r ear (Bitragion breadth)
            );
        }
        else if (points.Count == 17)
        {
            // Reduced 17-point scheme.
            objectPoints = objectPoints17;
            imagePoints.fromArray(
                new Point((points[2].x + points[3].x) / 2, (points[2].y + points[3].y) / 2), //l eye (Interpupillary breadth)
                new Point((points[4].x + points[5].x) / 2, (points[4].y + points[5].y) / 2), //r eye (Interpupillary breadth)
                new Point(points[0].x, points[0].y), //nose (Tip)
                new Point(points[1].x, points[1].y), //nose (Subnasale)
                new Point(points[6].x, points[6].y), //l ear (Bitragion breadth)
                new Point(points[8].x, points[8].y) //r ear (Bitragion breadth)
            );
        }
        else if (points.Count == 6)
        {
            // Minimal 6-point scheme (no ear points available).
            objectPoints = objectPoints6;
            imagePoints.fromArray(
                new Point((points[2].x + points[3].x) / 2, (points[2].y + points[3].y) / 2), //l eye (Interpupillary breadth)
                new Point((points[4].x + points[5].x) / 2, (points[4].y + points[5].y) / 2), //r eye (Interpupillary breadth)
                new Point(points[0].x, points[0].y), //nose (Tip)
                new Point(points[1].x, points[1].y) //nose (Subnasale)
            );
        }
        // NOTE(review): if points.Count is none of 68/17/6, objectPoints stays
        // null and the solvePnP calls below would fail — presumably the landmark
        // getter only ever returns these sizes; confirm.

        // Estimate head pose.
        // First frame (or after reset): allocate rvec/tvec and run an initial
        // solve with no extrinsic guess.
        if (rvec == null || tvec == null)
        {
            rvec = new Mat(3, 1, CvType.CV_64FC1);
            tvec = new Mat(3, 1, CvType.CV_64FC1);
            Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
        }

        double tvec_x = tvec.get(0, 0)[0], tvec_y = tvec.get(1, 0)[0], tvec_z = tvec.get(2, 0)[0];

        // Project the previous translation through the view-projection matrix
        // and check it lands inside normalized device coordinates [-1, 1]^3.
        bool isNotInViewport = false;
        Vector4 pos = VP * new Vector4((float)tvec_x, (float)tvec_y, (float)tvec_z, 1.0f);
        if (pos.w != 0)
        {
            float x = pos.x / pos.w, y = pos.y / pos.w, z = pos.z / pos.w;
            if (x < -1.0f || x > 1.0f || y < -1.0f || y > 1.0f || z < -1.0f || z > 1.0f)
            {
                isNotInViewport = true;
            }
        }

        if (double.IsNaN(tvec_z) || isNotInViewport)
        { // if tvec is wrong data, do not use extrinsic guesses. (the estimated object is not in the camera field of view)
            Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
        }
        else
        {
            // Previous pose looks sane: refine it iteratively using rvec/tvec
            // as the extrinsic guess (useExtrinsicGuess = true).
            Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
        }

        //Debug.Log (tvec.dump () + " " + isNotInViewport);

        if (!isNotInViewport)
        {
            // Convert to unity pose data.
            double[] rvecArr = new double[3];
            rvec.get(0, 0, rvecArr);
            double[] tvecArr = new double[3];
            tvec.get(0, 0, tvecArr);
            PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

            // adjust the position to the scale of real-world space.
            // (solvePnP works in millimeters here — TODO confirm units of the object-point models.)
            poseData.pos = new Vector3(poseData.pos.x * 0.001f, poseData.pos.y * 0.001f, poseData.pos.z * 0.001f);

            // Changes in pos/rot below these thresholds are ignored.
            if (enableLowPassFilter)
            {
                ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
            }
            oldPoseData = poseData;

            Matrix4x4 transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

            // right-handed coordinates system (OpenCV) to left-handed one (Unity)
            transformationM = invertYM * transformationM;

            // Apply Z axis inverted matrix.
            transformationM = transformationM * invertZM;

            headPosition = ARUtils.ExtractTranslationFromMatrix(ref transformationM);
            headRotation = ARUtils.ExtractRotationFromMatrix(ref transformationM);

            didUpdateHeadPositionAndRotation = true;
        }
    }
}
/// <summary>
/// Per-frame tracking update: reads the ULSee face-tracker landmarks, aligns
/// the 3D anchor to the detected face via the tracker's transform matrix, and
/// pushes the anchor along +Z in proportion to how open the mouth is. When no
/// face is tracked, parks the anchor at a fixed off-camera position.
/// </summary>
void Update()
{
    // Tracker not initialized yet; nothing to do.
    if (!initDone)
    {
        return;
    }

    // Show tracking result
    // ULS_UnityGetPoints fills _trackPoints with (x, y) pairs and returns the count.
    if (0 < Plugins.ULS_UnityGetPoints(_trackPoints))
    {
#if DRAW_MARKERS
        // Mirror the 2D landmark coordinates onto the marker objects.
        // NOTE(review): each marker is deactivated right after being positioned,
        // so they are updated but never shown — confirm this is intentional.
        for (int j = 0; j < Plugins.MAX_TRACKER_POINTS; ++j)
        {
            _marker2d[j].transform.localPosition = new Vector3(_trackPoints[j * 2], _trackPoints[j * 2 + 1], 0);
            _marker2d[j].SetActive(false);
        }
#endif

        // get transform matrix for alignment 3d objects
        Plugins.ULS_UnityGetTransform(_mtx, intrinsic_camera_matrix, null);
        // Copy the raw 4x4 (16 doubles/floats) into Unity's Matrix4x4.
        for (int i = 0; i < 16; ++i)
        {
            transformationM[i] = _mtx[i];
        }
        ARM = adjustMatrix * transformationM;

        // apply alignment matrix to object's transform
        ARUtils.SetTransformFromMatrix(_anchor3d.transform, ref ARM);

        // apply local scale to fit user's face
        //Plugins.ULS_UnityGetScale3D(_scaleX, _scaleY, _scaleZ);
        //Vector3 s = new Vector3(_scaleX[0], _scaleY[0], _scaleZ[0]);
        //s.Scale(_original_scale);
        //_helmet.transform.localScale = s;

        // NOTE(review): the distances below read _marker2d world positions, but
        // those are only refreshed inside the DRAW_MARKERS block above — when
        // DRAW_MARKERS is not defined these would use stale/default transforms.
        // Confirm DRAW_MARKERS is always defined in this build.

        // distance between the cheeks
        float disAn = Vector3.Distance(_marker2d[0].transform.position, _marker2d[4].transform.position);
        // distance from the nose to the midpoint of the eyes
        float disAl = Vector3.Distance(_marker2d[9].transform.position, _marker2d[10].transform.position);
        // mouth opening distance
        float DisMouth = Vector3.Distance(_marker2d[28].transform.position, _marker2d[29].transform.position);
        // reference ratio used to compute the scale
        float d = disAn / disAl;
        // _helmet.transform.localScale = new Vector3(d, d, d);
        Debug.Log(d);

        // Mouth opening relative to cheek distance; beyond the threshold the
        // anchor is moved along +Z proportionally (500 is an empirical gain).
        float parcentaje = DisMouth / disAn;
        if (parcentaje > 0.05f)
        {
            float newPosZ = parcentaje * 500;
            _anchor3d.transform.position = new Vector3(_anchor3d.transform.position.x, _anchor3d.transform.position.y, _anchor3d.transform.position.z + newPosZ);
        }
    }
    else
    {
        // No face detected: park the anchor at a fixed off-camera position.
        _anchor3d.transform.position = new Vector3(0, 0, -180.0f);
    }
}