public static void Run()
{
    // ExStart:1
    PdfApi pdfApi = new PdfApi(Common.APP_KEY, Common.APP_SID, Common.BASEPATH);
    StorageApi storageApi = new StorageApi(Common.APP_KEY, Common.APP_SID, Common.BASEPATH);

    String fileName = "Sample-Annotation.pdf";
    int pageNumber = 1;
    int annotationNumber = 1;
    String storage = "";
    String folder = "";

    try
    {
        // Upload the source file to Aspose Cloud storage
        storageApi.PutCreate(fileName, "", "", System.IO.File.ReadAllBytes(Common.GetDataDir() + fileName));

        // Invoke the Aspose.PDF Cloud SDK to get a specific annotation from a PDF page
        AnnotationResponse apiResponse = pdfApi.GetPageAnnotation(fileName, pageNumber, annotationNumber, storage, folder);

        if (apiResponse != null && apiResponse.Status.Equals("OK"))
        {
            Annotation annotation = apiResponse.Annotation;
            Console.WriteLine("Annotation Content: " + annotation.Contents);
            Console.ReadKey();
        }
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("error: " + ex.Message + "\n" + ex.StackTrace);
    }
    // ExEnd:1
}
public async Task GetAnnotation()
{
    foreach (ulong annotationId in _inputData.Annotations)
    {
        AnnotationResponse annotationResponse = await _geniusClient.AnnotationClient.GetAnnotation(annotationId);
        Assert.IsNotNull(annotationResponse);
    }
}
public async Task<ActionResult<AnnotationResponse>> Annotations([FromBody] AnnotationRequest request)
{
    // Simulate processing latency, log the incoming request, and return an empty response
    await Task.Delay(TimeSpan.FromMilliseconds(100));

    var requestJson = JsonConvert.SerializeObject(request);
    _logger.LogInformation(requestJson);

    var response = new AnnotationResponse();
    return Ok(response);
}
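// A minimal client-side sketch of exercising the Annotations action above with HttpClient.
// The base address, the "api/annotations" route, and the AnnotationRequest/AnnotationResponse
// shapes are assumptions, not taken from the controller; adjust them to the real routing/models.
// Requires System.Net.Http, System.Text, and Newtonsoft.Json.
public static async Task<AnnotationResponse> PostAnnotationRequestAsync(AnnotationRequest request)
{
    using (var client = new HttpClient { BaseAddress = new Uri("https://localhost:5001/") })
    {
        // Serialize the body the same way the action binds it ([FromBody] + JSON)
        var content = new StringContent(JsonConvert.SerializeObject(request), Encoding.UTF8, "application/json");
        HttpResponseMessage response = await client.PostAsync("api/annotations", content);
        response.EnsureSuccessStatusCode();

        string json = await response.Content.ReadAsStringAsync();
        return JsonConvert.DeserializeObject<AnnotationResponse>(json);
    }
}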
public void Pdf_Annotations_Tests()
{
    try
    {
        AnnotationsResponse annotationsResponse = pdfService.Annotations.ReadDocumantPageAnnotations("pdf-sample.pdf", 1, Utils.CloudStorage_Input_Folder);
        AnnotationResponse annotationResponse2 = pdfService.Annotations.ReadDocumentPageAnnotationByItsNumber("pdf-sample.pdf", 1, 1, Utils.CloudStorage_Input_Folder);
    }
    catch (Exception ex)
    {
        Assert.Fail(ex.Message);
    }
}
public async Task<RecognitionResult.Face[]> DetectEmotions(FaceCropResult.Face[] faces)
{
    // Build one FACE_DETECTION request per cropped face
    AnnotationRequest request = new AnnotationRequest();
    request.Requests = new AnnotationRequest.AnnotateImageRequest[faces.Length];
    for (int i = 0; i < faces.Length; i++)
    {
        FaceCropResult.Face face = faces[i];
        var r = new AnnotationRequest.AnnotateImageRequest
        {
            ImageData = new AnnotationRequest.AnnotateImageRequest.Image { Content = face.ImageBase64 },
            Features = new AnnotationRequest.AnnotateImageRequest.Feature[]
            {
                new AnnotationRequest.AnnotateImageRequest.Feature { Type = "FACE_DETECTION", MaxResults = 5 }
            }
        };
        request.Requests[i] = r;
    }

    try
    {
        HttpClient client = new HttpClient();
        HttpContent content = new StringContent(JsonConvert.SerializeObject(request));
        var httpResponse = await client.PostAsync("https://vision.googleapis.com/v1/images:annotate?key=" + _apiKey, content);
        string responseData = await httpResponse.Content.ReadAsStringAsync();
        if (httpResponse.IsSuccessStatusCode)
        {
            AnnotationResponse response = JsonConvert.DeserializeObject<AnnotationResponse>(responseData);

            // Map each face annotation's likelihood values onto the emotion scores we track
            List<RecognitionResult.Face> faceResults = new List<RecognitionResult.Face>();
            for (int i = 0; i < response.Responses.Length && i < faces.Length; i++)
            {
                AnnotationResponse.AnnotateImageResponse.FaceAnnotations faceAnnotations = response.Responses[i].FaceAnnotationData[0];
                RecognitionResult.Face faceResult = new RecognitionResult.Face
                {
                    Emotion = new RecognitionResult.Emotion
                    {
                        Anger = FromLikelyhood(faceAnnotations.Anger),
                        Happiness = FromLikelyhood(faceAnnotations.Joy),
                        Neutral = 0.0,
                        Sadness = FromLikelyhood(faceAnnotations.Sorrow),
                        Surprise = FromLikelyhood(faceAnnotations.Surprise)
                    }
                };
                faceResults.Add(faceResult);
            }
            return faceResults.ToArray();
        }
    }
    catch (Exception ex)
    {
        // Swallow request/deserialization errors and fall through to the null return below
        System.Diagnostics.Debug.WriteLine("DetectEmotions failed: " + ex.Message);
    }
    return null;
}
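// FromLikelyhood is not shown in the snippet above. A minimal sketch of what such a helper
// might do, assuming the custom FaceAnnotations fields hold the Vision API likelihood enum
// strings (VERY_UNLIKELY .. VERY_LIKELY). The exact numeric mapping is an assumption, not
// the original implementation.
private static double FromLikelyhood(string likelihood)
{
    switch (likelihood)
    {
        case "VERY_LIKELY":   return 1.0;
        case "LIKELY":        return 0.75;
        case "POSSIBLE":      return 0.5;
        case "UNLIKELY":      return 0.25;
        case "VERY_UNLIKELY": return 0.0;
        default:              return 0.0; // UNKNOWN or missing value
    }
}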