/// <summary>
/// Computes the affine transform mapping eye locations in the original
/// rectangle onto the desired eye locations in the target rectangle.
/// Three point correspondences are used: the two eyes plus a synthetic
/// third point placed at right angles to the vector joining the eyes
/// (supplied by EyePosAsMatrix), solved in a least-squares sense via SVD.
/// </summary>
/// <param name="origRect">Bounding rectangle of the source image</param>
/// <param name="origLeftEye">Left eye location in the source image</param>
/// <param name="origRightEye">Right eye location in the source image</param>
/// <param name="targetRect">Bounding rectangle of the target image</param>
/// <param name="targetLeftEye">Desired left eye location in the target</param>
/// <param name="targetRightEye">Desired right eye location in the target</param>
/// <returns>The affine transform as a double matrix (transpose of the SVD solution)</returns>
private double[,] GetFaceAffine(Rect origRect, Point origLeftEye, Point origRightEye, Rect targetRect, Point targetLeftEye, Point targetRightEye)
{
    // Step 1 - Construct the affine transformation.
    // origMat is 3x3: each row holds a source point plus a homogeneous
    // coordinate of 1, so the solve yields an affine (not purely linear)
    // map. targetMat is 3x2: the corresponding destination points.
    INumArray<float> targetMat = ArrFactory.FloatArray(3, 2);
    INumArray<float> origMat = ArrFactory.FloatArray(3, 3);

    FaceSortUI.ImageUtils.EyePosAsMatrix(origRect, origLeftEye, origRightEye, ref origMat);
    FaceSortUI.ImageUtils.EyePosAsMatrix(targetRect, targetLeftEye, targetRightEye, ref targetMat);

    // Homogeneous coordinate column.
    origMat[0, 2] = 1.0F;
    origMat[1, 2] = 1.0F;
    origMat[2, 2] = 1.0F;

    // Least-squares solve of origMat * X = targetMat; transpose so the
    // result is laid out as a conventional affine matrix.
    SVDFloat svd = new SVDFloat(origMat);
    INumArray<float> sss = svd.Solve(targetMat);
    INumArray<float> mmm = (INumArray<float>)sss.Transpose();
    double[,] affineMat = ArrFactory.DoubleArray(mmm).ToArray();
    return affineMat;
}
/// <summary>
/// Extract a normalized face from an input image. Normalization is done
/// by ensuring that the eye positions in the original image
/// map to the specified position in the face image. The input
/// image is assumed to be 1 byte per channel. Steps involved:
/// 1. Construct an affine mapping from destination to original eye location
/// 2. Fill in the destination image using the mapping
/// </summary>
/// <param name="origImage">Input image as a byte array</param>
/// <param name="origRect">Size of the original image</param>
/// <param name="origLeftEye">Left eye location in source image</param>
/// <param name="origRightEye">Right eye location in source</param>
/// <param name="bytePerPix"># bytes per pixel in original image. Since we assume 1 byte per channel this is same as # channels; face is constructed with same</param>
/// <param name="faceRect">Desired face size</param>
/// <param name="faceLeftEye">Desired left eye location in face</param>
/// <param name="faceRightEye">Desired right eye location in face</param>
/// <returns>Image array representing the colour planes of the extracted face, or null if an eye falls outside the source rectangle</returns>
public static Image[] ExtractNormalizeFace(Image[] origImage, Rect origRect, Point origLeftEye, Point origRightEye, int bytePerPix, Rect faceRect, Point faceLeftEye, Point faceRightEye)
{
    // Sanity check eye location: both eyes must lie inside the source image.
    if (!origRect.Contains(origLeftEye) || !origRect.Contains(origRightEye))
    {
        return null;
    }

    // Step 1 - Construct the affine transformation.
    // Find mapping between orig and desired eye locations + a
    // fake point located at right angles to the vector joining the two eyes.
    // NOTE(review): unlike GetFaceAffine, the homogeneous 3x3 matrix here is
    // built from the FACE (destination) points, so the solved transform maps
    // destination -> original, as the summary describes.
    INumArray<float> origMat = ArrFactory.FloatArray(3, 2);
    INumArray<float> faceMat = ArrFactory.FloatArray(3, 3);
    EyePosAsMatrix(origRect, origLeftEye, origRightEye, ref origMat);
    EyePosAsMatrix(faceRect, faceLeftEye, faceRightEye, ref faceMat);

    // Homogeneous coordinate column.
    faceMat[0, 2] = 1.0F;
    faceMat[1, 2] = 1.0F;
    faceMat[2, 2] = 1.0F;

    // Least-squares solve of faceMat * X = origMat via SVD, transposed into
    // conventional affine-matrix layout.
    SVDFloat svd = new SVDFloat(faceMat);
    INumArray<float> sss = svd.Solve(origMat);
    INumArray<float> mmm = (INumArray<float>)sss.Transpose();
    double[,] affineMat = ArrFactory.DoubleArray(mmm).ToArray();

    // Step 2 - Fill in the destination image using the mapping.
    return TransformImage(origImage, origRect, faceRect, affineMat, bytePerPix);
}