// -----------------------------------------------------------------------
// KEY SAMPLE CODE STARTS HERE
// -----------------------------------------------------------------------

/// <summary>
/// Uploads a local video file to the Video Service, polls until the face-tracking
/// operation completes, and publishes the result (source URI, formatted JSON, and
/// frame highlights) onto <c>_dataContext</c>.
/// </summary>
/// <param name="subscriptionKey">API subscription key used to authenticate the client.</param>
/// <param name="filePath">Path of the local video file to upload.</param>
private async Task DetectFaces(string subscriptionKey, string filePath)
{
    _dataContext.IsWorking = true;
    _dataContext.SourceUri = null;
    _dataContext.ResultText = null;

    try
    {
        Helpers.Log(LogIdentifier, "Start face tracking");

        VideoServiceClient client = new VideoServiceClient(subscriptionKey);
        client.Timeout = TimeSpan.FromMinutes(10);

        using (FileStream originalStream = new FileStream(filePath, FileMode.Open, FileAccess.Read))
        {
            // Creates a video operation of face tracking
            Helpers.Log(LogIdentifier, "Start uploading video");
            Operation operation = await client.CreateOperationAsync(originalStream, new FaceDetectionOperationSettings());
            Helpers.Log(LogIdentifier, "Uploading video done");

            // Starts querying service status; poll until the operation reaches a terminal state.
            OperationResult result = await client.GetOperationResultAsync(operation);
            while (result.Status != OperationStatus.Succeeded && result.Status != OperationStatus.Failed)
            {
                Helpers.Log(LogIdentifier, "Server status: {0}, wait {1} seconds...", result.Status, QueryWaitTime.TotalSeconds);
                await Task.Delay(QueryWaitTime);
                result = await client.GetOperationResultAsync(operation);
            }

            Helpers.Log(LogIdentifier, "Finish processing with server status: " + result.Status);

            // Processing finished, checks result
            if (result.Status == OperationStatus.Succeeded)
            {
                // Gets output JSON
                Helpers.Log(LogIdentifier, "Downloading result done");
                _dataContext.SourceUri = new Uri(filePath);
                _dataContext.ResultText = Helpers.FormatJson<FaceTracking>(result.ProcessingResult);
                _dataContext.FrameHighlights = GetHighlights(result.ProcessingResult).ToList();
            }
            else
            {
                // Failed
                Helpers.Log(LogIdentifier, "Fail reason: " + result.Message);
            }
        }
    }
    finally
    {
        // Always clear the busy flag, even when the upload or polling throws;
        // the original code left IsWorking stuck at true on any exception.
        _dataContext.IsWorking = false;
    }
}
/// <summary>
/// Uploads a video to the Video Service, polls until the face-tracking operation
/// completes, and on success records the video's natural dimensions and the
/// per-frame highlights. Sets <c>Processing</c> while the upload/poll is in flight.
/// </summary>
/// <param name="file">Media file whose stream is uploaded for face detection.</param>
public async Task UploadVideoAsync(MediaFile file)
{
    Processing = true;

    // SECURITY(review): subscription key is hard-coded in source. Move it to secure
    // configuration (e.g. app settings / secret store) and rotate the leaked key.
    VideoServiceClient = new VideoServiceClient("9739e652e7214256ac48cb85e641a96e")
    {
        Timeout = TimeSpan.FromMinutes(10)
    };

    try
    {
        using (Stream videoStream = file.GetStream())
        {
            var operation = await VideoServiceClient.CreateOperationAsync(videoStream,
                new FaceDetectionOperationSettings());

            // Poll the service until the operation reaches a terminal state.
            OperationResult result = await VideoServiceClient.GetOperationResultAsync(operation);
            while (result.Status != OperationStatus.Succeeded && result.Status != OperationStatus.Failed)
            {
                Debug.WriteLine(
                    $"Server status: {result.Status}, wait {QueryWaitTime.TotalSeconds} seconds");
                await Task.Delay(QueryWaitTime);
                result = await VideoServiceClient.GetOperationResultAsync(operation);
            }

            Debug.WriteLine($"Finish processing with server status: {result.Status}");

            // Processing finished, check result
            if (result.Status == OperationStatus.Succeeded)
            {
                var faceTrackingResult =
                    JsonConvert.DeserializeObject<FaceTracking>(result.ProcessingResult);
                NaturalVideoHeight = faceTrackingResult.Height;
                NaturalVideoWidth = faceTrackingResult.Width;
                FrameHighlights = GetHighlights(result.ProcessingResult).ToList();
            }
        }
    }
    catch (Exception ex)
    {
        // Best-effort upload: failures must not crash the caller, but the original
        // empty catch hid all diagnostics — at minimum surface them in debug output.
        Debug.WriteLine($"UploadVideoAsync failed: {ex}");
    }
    finally
    {
        // Reset the busy flag regardless of how the try block exits.
        Processing = false;
    }
}