/// <summary>
 /// Get normalized face image.
 ///
 /// This function returns a normalized face image with corresponding feature points.
 /// The size of the face in the normalized image is such that the inter-pupillary distance is approximately one quarter of the image width.
 ///
 /// The face is normalized to a varying degree depending on the normalization type. For example, a rotated
 /// face with an open mouth will only have its pose straightened with normalization type VS_NORM.POSE, while
 /// with the addition of VS_NORM.AU the normalized face will also have its mouth closed.
 ///
 /// Note that the face will always have its pose straightened.
 ///
 /// Types of normalization are:
 ///   - VS_NORM.POSE - face translation and rotation are set to zero thereby normalizing the pose
 ///   - VS_NORM.SU - parameters describing the face shape (shape units) are set to zero thereby normalizing the face shape
 ///   - VS_NORM.AU - parameters describing facial movements (action units) are set to zero, for example open mouth will be closed
 ///
 /// Different types of normalization can be combined with the "|" operator, for example VS_NORM.POSE | VS_NORM.SU.
 /// </summary>
 /// <param name="frame">Image containing the face to be normalized, must be grey-scale</param>
 /// <param name="normFace">Image containing the normalized face; it must be allocated before calling the function; face size will depend on this image size</param>
 /// <param name="face_data">FaceData structure containing the information about the face that will be normalized</param>
 /// <param name="normFDP">Features points that correspond to the normalized face; coordinates are normalized to 0-1 range</param>
 /// <param name="norm_type">Normalization type, a binary combination of VS_NORM.POSE - normalizes pose, VS_NORM.SU - normalizes shape units and VS_NORM.AU - normalizes action units</param>
 /// <param name="dataPath">Path to the folder where Face Detector.cfg is located, default values is ""</param>
 ///
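 /// For illustration, a minimal sketch of calling this function, assuming analyser is an initialised instance of the
 /// class declaring this method, greyFrame is a grey-scale VSImage of the current frame and faceData was obtained
 /// from VisageTracker; the VSImage allocation shown is hypothetical and should match the allocation used in your application:
 ///
 /// \code
 /// // allocate the output image first; the size of the normalized face follows this image size
 /// VSImage normFace = new VSImage(256, 256); // hypothetical constructor
 /// FDP normFDP = null;
 ///
 /// // normalize pose and face shape; normalization types are combined with the "|" operator
 /// analyser.GetNormalizedFaceImage(greyFrame, normFace, faceData, ref normFDP, VS_NORM.POSE | VS_NORM.SU, "");
 /// \endcode
 ///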
 public void GetNormalizedFaceImage(VSImage ^ frame, VSImage ^ normFace, FaceData ^ face_data, FDP ^ % normFDP, VS_NORM norm_type, System::String ^ dataPath);
 /// <summary>
 /// Estimates age from a facial image.
 ///
 /// The function returns the estimated age. Prior to using this function, it is necessary to process the facial image or video frame using
 /// VisageTracker or VisageFeaturesDetector and pass the obtained data to this function.
 /// An example of use, estimating age on the first face detected in an image file:
 /// </summary>
 /// \code
 /// // load the input image (fileName is the path to the image file)
 /// VsImage *frame = (VsImage *)cvLoadImage(fileName);
 ///
 /// VisageFaceAnalyser *m_VFA = new VisageFaceAnalyser();
 /// const char *dataPath = "./bdtsdata/LBF/vfadata";
 /// int is_initialized = m_VFA->init(dataPath);
 ///
 /// if ((is_initialized & VFA_AGE) == VFA_AGE)
 /// {
 ///     int maxFaces = 100;
 ///     FaceData *data = new FaceData[maxFaces];
 ///
 ///     // m_Detector is a previously initialised VisageFeaturesDetector instance
 ///     int n_faces = m_Detector->detectFacialFeatures(frame, data, maxFaces);
 ///
 ///     if (n_faces > 0)
 ///     {
 ///         // estimate age of the first detected face
 ///         float age = m_VFA->estimateAge(&data[0]);
 ///     }
 /// }
 /// \endcode
 ///
 /// <param name="facedata">Needs to contain FDP data. FDP data remains unchanged.</param>
 /// <returns>Returns the estimated age, or -1 if estimation failed.</returns>
 ///
 public float EstimateAge(FaceData facedata);
 /// <summary>
 /// Estimates emotion from a facial image.
 /// The function returns estimated probabilities for basic emotions. Prior to using this function, it is necessary to process the facial image or video frame using
 /// VisageTracker or VisageFeaturesDetector and pass the obtained data to this function.
 /// </summary>
 /// <param name="facedata">Needs to contain FDP data. FDP data remains unchanged.</param>
 /// <param name="probEstimates">Array of 7 doubles. If successful, the function will fill this array with estimated probabilities for emotions in this order: anger, disgust, fear, happiness, sadness, surprise and neutral.
 /// Each probability will have a value between 0 and 1. Sum of probabilities does not have to be 1.</param>
 /// <returns>Returns 1 if estimation was successful.</returns>
 ///
 /// See also: FaceData, VisageTracker, VisageFeaturesDetector
 ///
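 /// For illustration, a minimal sketch assuming vfa is an initialised VisageFaceAnalyser and faceData was obtained
 /// from VisageTracker or VisageFeaturesDetector for the face in the current frame:
 ///
 /// \code
 /// float[] probEstimates = new float[7];
 ///
 /// if (vfa.EstimateEmotion(faceData, probEstimates) == 1)
 /// {
 ///     // probabilities are ordered: anger, disgust, fear, happiness, sadness, surprise, neutral
 ///     float happiness = probEstimates[3];
 /// }
 /// \endcode
 ///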
 public int EstimateEmotion(FaceData facedata, float[] probEstimates);
 /// <summary>
 /// Estimates gender from a facial image.
 ///
 /// The function returns 1 if the estimated gender is male and 0 if it is female. Prior to using this function, it is necessary to process the facial image or video frame using VisageTracker or VisageFeaturesDetector and pass the obtained data to this function.
 /// \if WIN_DOXY
 ///
 /// An example of use, estimating gender on the first face detected in an image file:
 ///
 /// \code
 ///
 /// // load an image and store it in a VSImage object named frame
 /// VisageFaceAnalyser vfa = new VisageFaceAnalyser();
 /// int isInitialised = vfa.init("./bdtsdata/LBF/vfadata");
 ///
 /// int maxFaces = 100;
 /// FaceData[] dataArray = new FaceData[maxFaces];
 ///
 /// // vfd is a previously initialised VisageFeaturesDetector instance
 /// if ((isInitialised & VFA_GENDER) == VFA_GENDER)
 /// {
 ///     int numFaces = vfd.detectFacialFeatures(frame, dataArray, maxFaces, 0.1f);
 ///
 ///     if (numFaces > 0)
 ///     {
 ///         int gender = vfa.estimateGender(dataArray[0]);
 ///     }
 /// }
 /// \endcode
 /// \endif
 ///
 /// </summary>
 /// <param name="facedata">Needs to contain FDP data. FDP data remains unchanged.</param>
 /// <returns>Returns 0 if the estimated gender is female, 1 if it is male and -1 if estimation failed.</returns>
 public int EstimateGender(FaceData facedata);
 /// <summary>
 /// Extracts a face descriptor from the input RGB image and adds it to the gallery.
 /// </summary>
 /// <param name="image"> VsImage pointer that contains the input RGB image. The image should contain only one face and this face will be added to the gallery. In case of multiple faces in the image, it is not defined which face would be used. </param>
 /// <param name="facedata"> Facial data obtained from VisageTracker or VisageFeaturesDetector. </param>
 /// <param name="name"> Name of the face in the image. </param>
 /// <returns> Returns 1 on success, 0 on failure. The function may fail if the face is not found in the image or if the image argument is not a valid RGB image pointer. </returns>
 ///
 ///
 /// See also: VSImage
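 /// For illustration, a minimal sketch assuming vfa is an initialised VisageFaceAnalyser, rgbImage is a VSImage
 /// containing a single face and faceData was obtained for that face from VisageTracker or VisageFeaturesDetector;
 /// the gallery name "person_01" is arbitrary:
 ///
 /// \code
 /// int success = vfa.AddDescriptor(rgbImage, faceData, "person_01");
 ///
 /// if (success == 0)
 /// {
 ///     // the face was not found or rgbImage is not a valid RGB image
 /// }
 /// \endcode
 ///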
 int AddDescriptor(VSImage ^ image, FaceData ^ facedata, System::String ^ name);
 /// <summary>
 /// Extracts the face descriptor for face recognition from a facial image. Prior to using this function, it is necessary to process the facial image or video frame using VisageTracker or VisageFeaturesDetector and pass the obtained facial data to this function.
 ///
 /// </summary>
 /// <param name="facedata"> Facial data obtained from VisageTracker or VisageFeaturesDetector. </param>
 /// <param name="image"> VsImage pointer to the input RGB image. </param>
 /// <param name="descriptor"> Pointer to a DESCRIPTOR_SIZE-dimensional array of short. The resulting face descriptor is returned in this array. </param>
 /// <returns> Returns 1 on success, 0 on failure. </returns>
 ///
 /// See also: FaceData, VisageTracker, VisageFeaturesDetector
 ///
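 /// For illustration, a minimal sketch assuming vfa is an initialised VisageFaceAnalyser, rgbImage is the processed
 /// RGB VSImage, faceData was obtained from VisageTracker or VisageFeaturesDetector, and DESCRIPTOR_SIZE is the
 /// descriptor dimension referenced above:
 ///
 /// \code
 /// short[] descriptor = new short[DESCRIPTOR_SIZE];
 ///
 /// if (vfa.ExtractDescriptor(faceData, rgbImage, descriptor) == 1)
 /// {
 ///     // descriptor now holds the face descriptor and can be stored or compared against gallery descriptors
 /// }
 /// \endcode
 ///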
 int ExtractDescriptor(FaceData ^ facedata, VSImage ^ image, short[] descriptor);