        /// <summary>
        /// Analyzes the specified image bytes by using the Vision API (categories, description,
        /// color, tags, faces, image type, adult content, celebrities and landmarks).
        /// </summary>
        /// <param name="imageBytes">The raw bytes of the image to analyze.</param>
        /// <returns>The pretty-printed JSON response from the service.</returns>
        public static string MakeVisionAnalysisRequest(byte[] imageBytes)
        {
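            // Request the main visual features in a single call; "details=Celebrities,Landmarks"
            // adds domain-specific recognition on top of the general analysis.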
            string requestParameters = "analyze?visualFeatures=Categories,Description,Color,Tags,Faces,ImageType,Adult&language=en&details=Celebraties,Landmarks";
            string contentString     = HTTPExtension.HTTPClientCall(UrlVisionAPI, requestParameters, imageBytes, VisionAPI);

            // Return the pretty-printed JSON response.
            return JsonPrettyPrint(contentString);
        }
        /// <summary>
        /// Gets the domain-specific models (e.g. Celebrities, Landmarks) available to the Vision API.
        /// </summary>
        /// <param name="imageBytes">The raw bytes of the image.</param>
        /// <returns>The pretty-printed JSON response from the service.</returns>
        public static string MakeVisionDomainSpecificRequest(byte[] imageBytes)
        {
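            // "models" lists the available domain-specific models; a domain-specific analysis
            // would instead use "models/{model}/analyze" (for example the Celebrities or
            // Landmarks models).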
            string requestParameters = "models";//{model}/analyze .... Celebraties and Landmarks
            string contentString     = HTTPExtension.HTTPClientCall(UrlVisionAPI, requestParameters, imageBytes, VisionAPI);

            // Return the pretty-printed JSON response.
            return JsonPrettyPrint(contentString);
        }
        /// <summary>
        /// Gets the tags for the specified image bytes by using the Vision API.
        /// </summary>
        /// <param name="imageBytes">The raw bytes of the image to tag.</param>
        /// <returns>The pretty-printed JSON response from the service.</returns>
        public static string MakeVisionTagRequest(byte[] imageBytes)
        {
            string requestParameters = "tag";
            string contentString     = HTTPExtension.HTTPClientCall(UrlVisionAPI, requestParameters, imageBytes, VisionAPI);

            // Return the pretty-printed JSON response.
            return JsonPrettyPrint(contentString);
        }
        /// <summary>
        /// Gets the analysis of the specified image bytes by using the Face API.
        /// </summary>
        /// <param name="imageBytes">The raw bytes of the image to analyze.</param>
        /// <returns>The pretty-printed JSON response from the service.</returns>
        public static string MakeFaceAnalysisRequest(byte[] imageBytes)
        {
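            // Request a face ID plus the full attribute set (age, emotion, hair, etc.);
            // face landmarks are skipped to keep the response compact.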
            string requestParameters = "?returnFaceId=true&returnFaceLandmarks=false" +
                                       "&returnFaceAttributes=age,gender,headPose,smile,facialHair,glasses" +
                                       ",emotion,hair,makeup,occlusion,accessories,blur,exposure,noise";
            string contentString = HTTPExtension.HTTPClientCall(uriFaceBase, requestParameters, imageBytes, FaceAPI);

            // Return the pretty-printed JSON response.
            return JsonPrettyPrint(contentString);
        }
        /// <summary>
        /// Gets the text recognition (OCR) results for the specified image bytes by using the Vision API.
        /// </summary>
        /// <param name="imageBytes">The raw bytes of the image containing text.</param>
        /// <returns>The pretty-printed JSON response from the service.</returns>
        public static string MakeVisionTextRequest(byte[] imageBytes)
        {
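            // "language=unk" lets the OCR endpoint auto-detect the language and
            // "detectOrientation=true" corrects rotated text before recognition.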
            string requestParameters = "ocr?language=unk&detectOrientation=true";
            //string requestParameters = "textOperations?operationId";
            //string requestParameters = "recognizeText?handwriting=true/false";
            string contentString = HTTPExtension.HTTPClientCall(UrlVisionAPI, requestParameters, imageBytes, VisionAPI);

            // Return the pretty-printed JSON response.
            return JsonPrettyPrint(contentString);
        }
        /// <summary>
        /// Verifies whether two detected faces belong to the same person by using the Face API.
        /// </summary>
        /// <param name="faceId1">The first face ID returned by a previous detection call.</param>
        /// <param name="faceId2">The second face ID returned by a previous detection call.</param>
        /// <returns>The pretty-printed JSON response from the verify endpoint.</returns>
        public async Task<string> VerifyFace(string faceId1, string faceId2)
        {
            HTTPExtension ht = new HTTPExtension();
            var contentString = await ht.HTTPClientVerifyCallAsync(VerifyFaceAPI, FaceAPI, Guid.Parse(faceId1), Guid.Parse(faceId2));
            return JsonPrettyPrint(contentString);
        }
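
        /// <summary>
        /// Minimal usage sketch: loads an image from disk and runs the main requests.
        /// The method name and file path are illustrative placeholders; the class-level
        /// URLs and API keys are assumed to be configured elsewhere.
        /// </summary>
        public static void RunVisionSamples()
        {
            // Hypothetical sample image path; replace with a real local file.
            byte[] imageBytes = System.IO.File.ReadAllBytes(@"C:\images\sample.jpg");

            Console.WriteLine(MakeVisionAnalysisRequest(imageBytes));
            Console.WriteLine(MakeVisionTagRequest(imageBytes));
            Console.WriteLine(MakeVisionTextRequest(imageBytes));
            Console.WriteLine(MakeFaceAnalysisRequest(imageBytes));
        }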