/// <summary>
/// Accepts a user-submitted image, stores the original in the "photos" blob
/// container, writes a 192px-wide thumbnail to the "thumbnails" container, then
/// asks the Computer Vision API to describe the image and records the caption
/// and tags as blob metadata. Always redirects back to Index; errors are
/// surfaced via TempData["Message"].
/// </summary>
public async Task<ActionResult> Upload(IFormFile file)
{
    if (file?.Length > 0)
    {
        // Make sure the user selected an image file
        if (!file.ContentType.StartsWith("image"))
        {
            TempData["Message"] = "Only image files may be uploaded";
        }
        else
        {
            try
            {
                // Save the original image in the "photos" container
                CloudStorageAccount account = CloudStorageAccount.Parse(_configuration.GetConnectionString("Storage"));
                CloudBlobClient client = account.CreateCloudBlobClient();
                CloudBlobContainer container = client.GetContainerReference("photos");
                CloudBlockBlob photo = container.GetBlockBlobReference(Path.GetFileName(file.FileName));

                // FIX: dispose the upload stream — the original opened it inline
                // and never released it
                using (var uploadStream = file.OpenReadStream())
                {
                    await photo.UploadFromStreamAsync(uploadStream);
                }

                // Generate a thumbnail and save it in the "thumbnails" container
                using (var formFileStream = file.OpenReadStream())
                using (var sourceImage = Image.FromStream(formFileStream))
                {
                    // Scale to a fixed 192px width, preserving the aspect ratio
                    var newWidth = 192;
                    var newHeight = (Int32)(1.0 * sourceImage.Height / sourceImage.Width * newWidth);

                    using (var destinationImage = new Bitmap(sourceImage, new Size(newWidth, newHeight)))
                    using (var stream = new MemoryStream())
                    {
                        destinationImage.Save(stream, sourceImage.RawFormat);
                        stream.Seek(0L, SeekOrigin.Begin);
                        container = client.GetContainerReference("thumbnails");
                        CloudBlockBlob thumbnail = container.GetBlockBlobReference(Path.GetFileName(file.FileName));
                        await thumbnail.UploadFromStreamAsync(stream);
                    }
                }

                // Submit the image to Azure's Computer Vision API
                ComputerVisionClient vision = new ComputerVisionClient(
                    new ApiKeyServiceClientCredentials(_configuration.GetSection("Vision").GetValue<String>("Key")),
                    new System.Net.Http.DelegatingHandler[] { }
                );
                vision.Endpoint = _configuration.GetSection("Vision").GetValue<String>("Endpoint");

                VisualFeatureTypes[] features = new VisualFeatureTypes[] { VisualFeatureTypes.Description };
                ImageAnalysis result = await vision.AnalyzeImageAsync(photo.Uri.ToString(), features);

                // Record the image description and tags in blob metadata.
                // FIX: the Vision API can legitimately return zero captions,
                // which previously threw and aborted the metadata write.
                if (result.Description.Captions.Count > 0)
                {
                    photo.Metadata.Add("Caption", result.Description.Captions[0].Text);
                }

                for (int i = 0; i < result.Description.Tags.Count; i++)
                {
                    string key = String.Format("Tag{0}", i);
                    photo.Metadata.Add(key, result.Description.Tags[i]);
                }

                await photo.SetMetadataAsync();
            }
            catch (Exception ex)
            {
                // In case something goes wrong, show the error on the next page
                TempData["Message"] = ex.Message;
            }
        }
    }

    return RedirectToAction("Index");
}
/// <summary>
/// Receives an uploaded photo, persists the original to the "images" blob
/// container, builds a 192px-wide thumbnail in the "thumbnails" container, and
/// stores the Computer Vision caption plus tags as metadata on the original
/// blob. Redirects back to Index in every case.
/// </summary>
public async Task<ActionResult> Upload(HttpPostedFileBase image)
{
    // Nothing to do for an empty or missing upload
    if (image == null || image.ContentLength <= 0)
    {
        return RedirectToAction("Index");
    }

    // Reject anything that is not an image
    if (!image.ContentType.StartsWith("image"))
    {
        TempData["Message"] = "Only image files may be uploaded";
        return RedirectToAction("Index");
    }

    try
    {
        // Persist the original upload to the "images" container
        var storage = CloudStorageAccount.Parse(ConfigurationManager.AppSettings["StorageConnectionString"]);
        var blobClient = storage.CreateCloudBlobClient();
        var imagesContainer = blobClient.GetContainerReference("images");
        var originalBlob = imagesContainer.GetBlockBlobReference(Path.GetFileName(image.FileName));
        await originalBlob.UploadFromStreamAsync(image.InputStream);

        // Rewind the input and produce the resized copy
        using (var resizedStream = new MemoryStream())
        {
            image.InputStream.Seek(0L, SeekOrigin.Begin);
            ImageBuilder.Current.Build(image.InputStream, resizedStream, new ResizeSettings { MaxWidth = 192 });
            resizedStream.Seek(0L, SeekOrigin.Begin);

            var thumbsContainer = blobClient.GetContainerReference("thumbnails");
            var thumbBlob = thumbsContainer.GetBlockBlobReference(Path.GetFileName(image.FileName));
            await thumbBlob.UploadFromStreamAsync(resizedStream);

            // Hand the blob URL of the original to the Vision API so it can
            // generate a description for the image
            var vision = new ComputerVisionClient(
                new ApiKeyServiceClientCredentials(ConfigurationManager.AppSettings["SubscriptionKey"]),
                new System.Net.Http.DelegatingHandler[] { });
            vision.Endpoint = ConfigurationManager.AppSettings["VisionEndpoint"];

            var features = new VisualFeatureTypes[] { VisualFeatureTypes.Description };
            var result = await vision.AnalyzeImageAsync(originalBlob.Uri.ToString(), features);

            // Copy the caption and every tag onto the blob's metadata
            originalBlob.Metadata.Add("Caption", result.Description.Captions[0].Text);
            for (int tagIndex = 0; tagIndex < result.Description.Tags.Count; tagIndex++)
            {
                originalBlob.Metadata.Add(String.Format("Tag{0}", tagIndex), result.Description.Tags[tagIndex]);
            }
            await originalBlob.SetMetadataAsync();
        }
    }
    catch (Exception ex)
    {
        // Surface the failure to the next view
        TempData["Message"] = ex.Message;
    }

    return RedirectToAction("Index");
}
// mode : "shots", "scenes", "purge", "load"
// [HttpPost]
/// <summary>
/// Imports Video Indexer insights for <paramref name="videoId"/> into blob
/// storage and annotates every selected thumbnail with a Computer Vision
/// description (optionally translated to <paramref name="translationLang"/>).
/// Modes: "load" = no-op; "purge" = delete existing blobs only; "scenes" =
/// process one thumbnail per scene; anything else = process every shot
/// keyframe. Progress is streamed to the client via Functions.SendProgress.
/// NOTE(review): dirInsights/fileInsights/dirHighRes/dirLowRes are referenced
/// but not defined here — presumably class-level path constants; confirm.
/// </summary>
public async Task Importviscenes(string mode, string videoId, string viAcctID, string viSubKey, string viLocation, string translationLang)
{
    // "load" mode means the client only wants previously imported data: nothing to do here
    if (mode == "load")
    {
        return;
    }
    Functions.SendProgress("Cleaning...", 0, 1);
    // Pass a list of blob URIs in ViewBag
    CloudStorageAccount account = CloudStorageAccount.Parse(ConfigurationManager.AppSettings["StorageConnectionString"]);
    CloudBlobClient client = account.CreateCloudBlobClient();
    // One container per video, named after the video id
    CloudBlobContainer containerVideoId = client.GetContainerReference(videoId);
    // Let's purge the data: delete all existing blobs for this video in parallel
    List<IListBlobItem> blobsList = new List<IListBlobItem>();
    try
    {
        blobsList = containerVideoId.ListBlobs().ToList();
        List<Task> myTasks = new List<Task>();
        foreach (IListBlobItem item in containerVideoId.ListBlobs(useFlatBlobListing: true))
        {
            var blob = item as CloudBlockBlob;
            if (blob != null)
            {
                myTasks.Add(blob.DeleteAsync());
            }
        }
        await Task.WhenAll(myTasks.ToArray()).ConfigureAwait(false);
    }
    catch (Exception exc)
    {
        // Purge failures are reported but do not abort the import
        ViewBag.ErrorMsg = exc.ToString();
        Functions.SendProgress("Error in ListBlobs:" + exc.Message, 0, 1);
    }
    // user wants only purge
    if (mode == "purge")
    {
        return;
    }
    Functions.SendProgress("Initialization...", 0, 1);
    // Container is publicly readable so Vision/clients can fetch blobs by URL
    await containerVideoId.CreateIfNotExistsAsync(BlobContainerPublicAccessType.Container, null, null).ConfigureAwait(false);
    // Translator credentials
    string translatorSubscriptionKey = ConfigurationManager.AppSettings["TranslatorSubscriptionKey"];
    string translatorEndpoint = ConfigurationManager.AppSettings["TranslatorEndpoint"];
    // Computer vision init
    ComputerVisionClient vision = new ComputerVisionClient(
        new ApiKeyServiceClientCredentials(ConfigurationManager.AppSettings["VisionSubscriptionKey"]),
        new System.Net.Http.DelegatingHandler[] { }
    );
    vision.Endpoint = ConfigurationManager.AppSettings["VisionEndpoint"];
    VisualFeatureTypes[] features = new VisualFeatureTypes[] { VisualFeatureTypes.Description };
    // Fetch the full insights JSON for the video from Video Indexer
    string jsonData = "";
    VideoIndexer myVI = new VideoIndexer(viAcctID, viLocation, viSubKey);
    try
    {
        //videoToken = await myVI.GetVideoAccessTokenAsync(videoId).ConfigureAwait(false);
        jsonData = await myVI.GetInsightsAsync(videoId).ConfigureAwait(false);
    }
    catch (Exception exc)
    {
        // Without insights nothing else can proceed
        ViewBag.ErrorMsg = exc.ToString();
        Functions.SendProgress("Error in GetInsightsAsync:" + exc.Message, 0, 1);
        return;
    }
    // SAVING INSIGHTS AS A BLOB (re-indented JSON for readability)
    CloudBlockBlob insightsBlob = containerVideoId.GetBlockBlobReference(dirInsights + "/" + fileInsights);
    JObject jObj = JObject.Parse(jsonData);
    await insightsBlob.UploadTextAsync(jObj.ToString(Formatting.Indented)).ConfigureAwait(false);
    List<BlobInfo> blobs = new List<BlobInfo>();
    // Maps shot/scene start times to the thumbnail id that represents them
    Dictionary<TimeSpan, string> shotsTimingAndThumbnailsId = new Dictionary<TimeSpan, string>();
    Dictionary<TimeSpan, string> scenesTimingAndThumbnailId = new Dictionary<TimeSpan, string>();
    dynamic viInsights = JsonConvert.DeserializeObject<dynamic>(jsonData);
    var video = viInsights.videos[0];
    var shots = video.insights.shots;
    var scenes = video.insights.scenes;
    // list of shots: one entry per keyframe instance, keyed by adjusted start time
    foreach (var shot in shots)
    {
        foreach (var keyFrame in shot.keyFrames)
        {
            foreach (var instance in keyFrame.instances)
            {
                string thumbnailId = (string)instance.thumbnailId;
                string thumbnailStartTime = (string)instance.adjustedStart;
                shotsTimingAndThumbnailsId.Add(TimeSpan.Parse(thumbnailStartTime, CultureInfo.InvariantCulture), thumbnailId);
            }
        }
    }
    var listTimings = shotsTimingAndThumbnailsId.Select(d => d.Key).ToList().OrderBy(d => d);
    //list of scenes (a scene contains several shots, but in the JSON, thumbnails are not defined in scenes)
    // so each scene borrows the thumbnail of the shot keyframe closest to its start time
    if (scenes != null) // sometimes, there is no scene !
    {
        foreach (var scene in scenes)
        {
            TimeSpan start = TimeSpan.Parse((string)scene.instances[0].adjustedStart, CultureInfo.InvariantCulture);
            var closestTime = listTimings.OrderBy(t => Math.Abs((t - start).Ticks))
                              .First();
            scenesTimingAndThumbnailId.Add(closestTime, shotsTimingAndThumbnailsId[closestTime]);
        }
    }
    // it's the list of thumbnails we want to process (scenes or all shots)
    Dictionary<TimeSpan, string> thumbnailsToProcessTimeAndId = new Dictionary<TimeSpan, string>();
    if (mode == "scenes") // scenes only
    {
        if (scenes == null) // no scenes, let's quit
        {
            Functions.SendProgress($"No scenes !", 10, 10);
            return;
        }
        thumbnailsToProcessTimeAndId = scenesTimingAndThumbnailId;
    }
    else // all shots
    {
        thumbnailsToProcessTimeAndId = shotsTimingAndThumbnailsId;;
    }
    int index = 0;
    // Per-thumbnail pipeline: download from Video Indexer, store high-res blob,
    // store a 192px low-res copy, analyze with Vision, write metadata
    foreach (var thumbnailEntry in thumbnailsToProcessTimeAndId)
    {
        Functions.SendProgress($"Processing {thumbnailsToProcessTimeAndId.Count} thumbnails...", index, thumbnailsToProcessTimeAndId.Count);
        index++;
        //if (index == 100) break;
        string thumbnailId = thumbnailEntry.Value;
        var thumbnailStartTime = thumbnailEntry.Key;
        // Get the video thumbnail data and upload to photos folder
        var thumbnailHighResStream = await myVI.GetVideoThumbnailAsync(videoId, thumbnailId).ConfigureAwait(false);
        CloudBlockBlob thumbnailHighResBlob = containerVideoId.GetBlockBlobReference(dirHighRes + "/" + thumbnailId + ".jpg");
        await thumbnailHighResBlob.UploadFromStreamAsync(thumbnailHighResStream).ConfigureAwait(false);
        // let's create the low res version
        using (var thumbnailLowResStream = new MemoryStream())
        {
            // rewind — the upload above consumed the stream
            thumbnailHighResStream.Seek(0L, SeekOrigin.Begin);
            var settings = new ResizeSettings { MaxWidth = 192 };
            ImageBuilder.Current.Build(thumbnailHighResStream, thumbnailLowResStream, settings);
            thumbnailLowResStream.Seek(0L, SeekOrigin.Begin);
            CloudBlockBlob thumbnailLowRes = containerVideoId.GetBlockBlobReference(dirLowRes + "/" + thumbnailId + ".jpg");
            await thumbnailLowRes.UploadFromStreamAsync(thumbnailLowResStream).ConfigureAwait(false);
        }
        // Submit the image to Azure's Computer Vision API
        var result = await vision.AnalyzeImageAsync(thumbnailHighResBlob.Uri.ToString(), features).ConfigureAwait(false);
        // cleaning metadata on blobs
        thumbnailHighResBlob.Metadata.Clear();
        thumbnailHighResBlob.Metadata.Add("Index", index.ToString(CultureInfo.InvariantCulture));
        // Record the image description and tags in blob metadata
        if (result.Description.Captions.Count > 0)
        {
            thumbnailHighResBlob.Metadata.Add("Description", result.Description.Captions[0].Text);
            thumbnailHighResBlob.Metadata.Add("Confidence", (result.Description.Captions[0].Confidence * 100).ToString("F1", CultureInfo.InvariantCulture));
            if (!string.IsNullOrEmpty(translationLang))
            {
                try
                {
                    string descriptionTranslated = await Translator.TranslateTextRequest(translatorSubscriptionKey, translatorEndpoint, result.Description.Captions[0].Text, translationLang).ConfigureAwait(false);
                    // Base64-encoded because metadata values must be ASCII-safe
                    thumbnailHighResBlob.Metadata.Add("DescriptionTranslated", Convert.ToBase64String(Encoding.UTF8.GetBytes(descriptionTranslated)));
                }
                catch (Exception exc)
                {
                    // Translation is best-effort; keep going on failure
                    ViewBag.ErrorMsg = exc.ToString();
                    Functions.SendProgress("Error in TranslateTextRequest:" + exc.Message, 0, 1);
                }
            }
            //var guidThumbnail = Path.GetFileNameWithoutExtension(thumbnailHighResBlob.Name).Substring(18);
        }
        thumbnailHighResBlob.Metadata.Add("AdjustedStart", thumbnailStartTime.ToString());
        for (int i = 0; i < result.Description.Tags.Count; i++)
        {
            string key = String.Format(CultureInfo.InvariantCulture, "Tag{0}", i);
            thumbnailHighResBlob.Metadata.Add(key, result.Description.Tags[i]);
        }
        await thumbnailHighResBlob.SetMetadataAsync().ConfigureAwait(false);
    }
    // 100%
    Functions.SendProgress($"Processing {thumbnailsToProcessTimeAndId.Count} thumbnails...", 10, 10);
    //return RedirectToAction("Index");
}
/// <summary>
/// Stores the uploaded image in the "photos" container, renders a 192x192 PNG
/// thumbnail into the "thumbnails" container, and records the Computer Vision
/// caption and tags as metadata on the original blob. Redirects to Index.
/// </summary>
public async Task<ActionResult> Upload(IFormFile file)
{
    if (file != null && file.Length > 0)
    {
        // Make sure the user selected an image file
        if (!file.ContentType.StartsWith("image"))
        {
            TempData["Message"] = "Only image files may be uploaded";
        }
        else
        {
            // FIX: wrap the whole pipeline in try/catch like the other upload
            // actions, so a Vision/storage failure reports instead of throwing
            try
            {
                // Save the original image in the "photos" container
                CloudStorageAccount account = CloudStorageAccount.Parse(m_conf.GetConnectionString("Storage"));
                CloudBlobClient client = account.CreateCloudBlobClient();
                CloudBlobContainer container = client.GetContainerReference("photos");
                CloudBlockBlob photo = container.GetBlockBlobReference(Path.GetFileName(file.FileName));

                // FIX: dispose the request stream (the original never released it)
                using (var fileStream = file.OpenReadStream())
                {
                    await photo.UploadFromStreamAsync(fileStream);
                    fileStream.Seek(0L, SeekOrigin.Begin);

                    using (var thumbnailStream = new MemoryStream())
                    using (var image = new Bitmap(fileStream))
                    using (var resized = new Bitmap(192, 192)) // FIX: was never disposed
                    {
                        using (var graphics = Graphics.FromImage(resized))
                        {
                            graphics.CompositingQuality = CompositingQuality.HighSpeed;
                            graphics.InterpolationMode = InterpolationMode.HighQualityBicubic;
                            graphics.CompositingMode = CompositingMode.SourceCopy;
                            // NOTE: draws into a fixed 192x192 square, so the
                            // aspect ratio is not preserved (matches original behavior)
                            graphics.DrawImage(image, 0, 0, 192, 192);
                        }
                        resized.Save(thumbnailStream, ImageFormat.Png);
                        thumbnailStream.Seek(0L, SeekOrigin.Begin);

                        // Submit the image to Azure's Computer Vision API
                        ComputerVisionClient vision = new ComputerVisionClient(
                            new ApiKeyServiceClientCredentials(m_conf.GetValue<string>("AppSettings:VisionKey"))
                        );
                        vision.Endpoint = m_conf.GetValue<string>("AppSettings:VisionEndpoint");

                        VisualFeatureTypes[] features = new VisualFeatureTypes[] { VisualFeatureTypes.Description };
                        var result = await vision.AnalyzeImageAsync(photo.Uri.ToString(), features);

                        // Record the image description and tags in blob metadata.
                        // FIX: guard against an empty caption list from the service
                        if (result.Description.Captions.Count > 0)
                        {
                            photo.Metadata.Add("Caption", result.Description.Captions[0].Text);
                        }
                        for (int i = 0; i < result.Description.Tags.Count; i++)
                        {
                            string key = String.Format("Tag{0}", i);
                            photo.Metadata.Add(key, result.Description.Tags[i]);
                        }
                        await photo.SetMetadataAsync();

                        // Upload the thumbnail to the "thumbnails" container
                        container = client.GetContainerReference("thumbnails");
                        CloudBlockBlob thumbnail = container.GetBlockBlobReference(Path.GetFileName(file.FileName));
                        await thumbnail.UploadFromStreamAsync(thumbnailStream);
                    }
                }
            }
            catch (Exception ex)
            {
                // In case something goes wrong
                TempData["Message"] = ex.Message;
            }
        }
    }

    // redirect back to the index action to show the form once again
    return RedirectToAction("Index");
}
/// <summary>
/// Uploads a photo to the "photos" container, generates a 192px-wide thumbnail,
/// analyzes the photo with the Computer Vision SDK (and a raw REST call whose
/// JSON is persisted), writes caption/tag metadata on the blob, and records the
/// image in the database. Always redirects back to Index.
/// </summary>
public async Task<ActionResult> Upload(HttpPostedFileBase file)
{
    const string uriBase = "https://southeastasia.api.cognitive.microsoft.com/vision/v1.0/analyze";

    try
    {
        if (file != null && file.ContentLength > 0)
        {
            // Make sure the user selected an image file
            if (!file.ContentType.StartsWith("image"))
            {
                TempData["Message"] = "Only Image Files may be Uploaded!";
            }
            else
            {
                // FIX: blob setup moved inside the null check — the original
                // dereferenced file.FileName before verifying file != null,
                // which threw NullReferenceException on an empty post.
                CloudStorageAccount oStorageAccount = CloudStorageAccount.Parse(ConfigurationManager.AppSettings["StorageConnectionString"]);
                CloudBlobClient oBlobClient = oStorageAccount.CreateCloudBlobClient();
                CloudBlobContainer oStorageContainer = oBlobClient.GetContainerReference("photos");
                CloudBlockBlob oPhoto = oStorageContainer.GetBlockBlobReference(Path.GetFileName(file.FileName));
                string strThumbUri = string.Empty;

                await oPhoto.UploadFromStreamAsync(file.InputStream);

                #region Generate a Thumbnail
                using (var outputStream = new MemoryStream())
                {
                    file.InputStream.Seek(0L, SeekOrigin.Begin);
                    var settings = new ResizeSettings { MaxWidth = 192 };
                    ImageBuilder.Current.Build(file.InputStream, outputStream, settings);
                    outputStream.Seek(0L, SeekOrigin.Begin);
                    oStorageContainer = oBlobClient.GetContainerReference("thumbnails");
                    CloudBlockBlob oThumbnail = oStorageContainer.GetBlockBlobReference(Path.GetFileName(file.FileName));
                    await oThumbnail.UploadFromStreamAsync(outputStream);
                    strThumbUri = oThumbnail.Uri.ToString();
                }
                #endregion // end of Generate a Thumbnail

                #region Submit to Azure's Computer Vision API
                ComputerVisionClient oVisionClient = new ComputerVisionClient(
                    new ApiKeyServiceClientCredentials(ConfigurationManager.AppSettings["SubscriptionKey"]),
                    new System.Net.Http.DelegatingHandler[] { });
                oVisionClient.Endpoint = ConfigurationManager.AppSettings["VisionEndpoint"];
                VisualFeatureTypes[] oVisualFeatures = new VisualFeatureTypes[]
                {
                    VisualFeatureTypes.Objects, VisualFeatureTypes.Tags, VisualFeatureTypes.Description,
                    VisualFeatureTypes.Faces, VisualFeatureTypes.ImageType, VisualFeatureTypes.Color,
                    VisualFeatureTypes.Adult, VisualFeatureTypes.Categories, VisualFeatureTypes.Brands
                };
                var vResponse = await oVisionClient.AnalyzeImageAsync(oPhoto.Uri.ToString(), oVisualFeatures);

                // Record the image description and tags in blob metadata.
                // FIX: the service can return zero captions; guard before indexing
                string strCaption = vResponse.Description.Captions.Count > 0
                    ? vResponse.Description.Captions[0].Text
                    : string.Empty;
                oPhoto.Metadata.Add("Caption", strCaption);
                for (int i = 0; i < vResponse.Description.Tags.Count; i++)
                {
                    string key = String.Format("Tag{0}", i);
                    oPhoto.Metadata.Add(key, vResponse.Description.Tags[i]);
                }
                await oPhoto.SetMetadataAsync();
                #endregion // end of Submit to Azure's Computer Vision API

                #region Save Database
                if (vResponse != null)
                {
                    // FIX: dispose the HttpClient (the original leaked it).
                    // NOTE(review): a shared static HttpClient / IHttpClientFactory
                    // would be preferable to one per request — confirm against
                    // the app's hosting model before changing further.
                    using (HttpClient oHttpClient = new HttpClient())
                    {
                        HttpResponseMessage oHttpResponseMessage;

                        // Request headers.
                        oHttpClient.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", ConfigurationManager.AppSettings["SubscriptionKey"]);

                        // Request parameters. A third optional parameter is "details".
                        string strParams = "visualFeatures=Categories,Tags,Description,Faces,ImageType,Color,Adult&details=&language=en";

                        // Request body: the blob URL of the uploaded photo.
                        string strBody = "{\"url\":\"" + oPhoto.Uri.ToString() + "\"}";
                        byte[] byteData = Encoding.UTF8.GetBytes(strBody);

                        using (ByteArrayContent content = new ByteArrayContent(byteData))
                        {
                            content.Headers.ContentType = new MediaTypeHeaderValue("application/json");
                            // Make the REST API call.
                            oHttpResponseMessage = await oHttpClient.PostAsync(
                                string.Format("{0}?{1}", uriBase, strParams), content);
                        }

                        // Get the JSON response and persist the record on success.
                        string strResponse = await oHttpResponseMessage.Content.ReadAsStringAsync();
                        if (oHttpResponseMessage.IsSuccessStatusCode)
                        {
                            DataAccess oDataAccess = new DataAccess();
                            StorageModel oStorageModel = new StorageModel(
                                oPhoto.Name, strCaption, oPhoto.Uri.ToString(), strThumbUri, strResponse);
                            oDataAccess.AddStorage(oStorageModel);
                        }
                    }
                }
                #endregion // end of Save Database
            }
        }
    }
    catch (Exception oException)
    {
        // In case something goes wrong
        TempData["Message"] = oException.Message;
    }

    return RedirectToAction("Index");
}
/// <summary>
/// Handles a photo upload: stores the original in the "photos" container with a
/// 90-second client-side cache policy, writes a 192px-wide thumbnail to the
/// "thumbnails" container, and tags the blob with the Computer Vision caption
/// and tag list. Async so the request thread is not blocked on I/O.
/// </summary>
public async Task<ActionResult> Upload(HttpPostedFileBase file)
{
    // Nothing to do for an empty or missing upload
    if (file == null || file.ContentLength <= 0)
    {
        return RedirectToAction("Index");
    }

    // Only image content types are accepted
    if (!file.ContentType.StartsWith("image"))
    {
        TempData["Message"] = "Only image files may be uploaded";
        return RedirectToAction("Index");
    }

    try
    {
        // Original goes into the "photos" container (connection string from web.config)
        var storageAccount = CloudStorageAccount.Parse(ConfigurationManager.AppSettings["StorageConnectionString"]);
        var blobClient = storageAccount.CreateCloudBlobClient();
        var photosContainer = blobClient.GetContainerReference("photos");
        var photoBlob = photosContainer.GetBlockBlobReference(Path.GetFileName(file.FileName));

        // Let clients cache the blob for 90 seconds, then revalidate
        photoBlob.Properties.CacheControl = "max-age=90, must-revalidate";
        await photoBlob.UploadFromStreamAsync(file.InputStream);

        // Build the thumbnail from the same input stream (rewound first)
        using (var thumbStream = new MemoryStream())
        {
            file.InputStream.Seek(0L, SeekOrigin.Begin);
            ImageBuilder.Current.Build(file.InputStream, thumbStream, new ResizeSettings { MaxWidth = 192 });
            thumbStream.Seek(0L, SeekOrigin.Begin);

            var thumbContainer = blobClient.GetContainerReference("thumbnails");
            var thumbBlob = thumbContainer.GetBlockBlobReference(Path.GetFileName(file.FileName));
            await thumbBlob.UploadFromStreamAsync(thumbStream);
        }

        // Describe the uploaded photo via the Computer Vision API (key/endpoint from web.config)
        var visionClient = new ComputerVisionClient(
            new ApiKeyServiceClientCredentials(ConfigurationManager.AppSettings["SubscriptionKey"]),
            new System.Net.Http.DelegatingHandler[] { });
        visionClient.Endpoint = ConfigurationManager.AppSettings["VisionEndpoint"];

        var wantedFeatures = new VisualFeatureTypes[] { VisualFeatureTypes.Description };
        var analysis = await visionClient.AnalyzeImageAsync(photoBlob.Uri.ToString(), wantedFeatures);

        // Persist the caption and each tag as blob metadata
        photoBlob.Metadata.Add("Caption", analysis.Description.Captions[0].Text);
        for (int tagNo = 0; tagNo < analysis.Description.Tags.Count; tagNo++)
        {
            photoBlob.Metadata.Add(String.Format("Tag{0}", tagNo), analysis.Description.Tags[tagNo]);
        }
        await photoBlob.SetMetadataAsync();
    }
    catch (Exception ex)
    {
        // Report the failure on the next page rather than crashing
        TempData["Message"] = ex.Message;
    }

    return RedirectToAction("Index");
}
/// <summary>
/// Saves an uploaded image to the "photos" container, produces a 192px-wide
/// thumbnail in the "thumbnails" container, then stores the Computer Vision
/// caption and tags as metadata on the original blob. Redirects to Index.
/// </summary>
public async Task<ActionResult> Upload(HttpPostedFileBase file)
{
    if (file != null && file.ContentLength > 0)
    {
        if (!file.ContentType.StartsWith("image"))
        {
            // Non-image uploads are rejected with a message for the next view
            TempData["Message"] = "Only image files may be uploaded";
        }
        else
        {
            try
            {
                // Storage plumbing: account -> client -> "photos" container -> blob
                var storage = CloudStorageAccount.Parse(ConfigurationManager.AppSettings["StorageConnectionString"]);
                var storageClient = storage.CreateCloudBlobClient();
                var targetContainer = storageClient.GetContainerReference("photos");
                var uploadedPhoto = targetContainer.GetBlockBlobReference(Path.GetFileName(file.FileName));
                await uploadedPhoto.UploadFromStreamAsync(file.InputStream);

                // Thumbnail: rewind the input, resize to 192px wide, upload
                using (var shrunkStream = new MemoryStream())
                {
                    file.InputStream.Seek(0L, SeekOrigin.Begin);
                    var resizeOptions = new ResizeSettings { MaxWidth = 192 };
                    ImageBuilder.Current.Build(file.InputStream, shrunkStream, resizeOptions);
                    shrunkStream.Seek(0L, SeekOrigin.Begin);
                    targetContainer = storageClient.GetContainerReference("thumbnails");
                    var thumbnailBlob = targetContainer.GetBlockBlobReference(Path.GetFileName(file.FileName));
                    await thumbnailBlob.UploadFromStreamAsync(shrunkStream);
                }

                // Ask the Computer Vision API to describe the blob by URL
                var cognitiveClient = new ComputerVisionClient(
                    new ApiKeyServiceClientCredentials(ConfigurationManager.AppSettings["SubscriptionKey"]),
                    new System.Net.Http.DelegatingHandler[] { });
                cognitiveClient.Endpoint = ConfigurationManager.AppSettings["VisionEndpoint"];
                var requested = new VisualFeatureTypes[] { VisualFeatureTypes.Description };
                var analysis = await cognitiveClient.AnalyzeImageAsync(uploadedPhoto.Uri.ToString(), requested);

                // Caption plus one metadata entry per returned tag
                uploadedPhoto.Metadata.Add("Caption", analysis.Description.Captions[0].Text);
                for (int n = 0; n < analysis.Description.Tags.Count; n++)
                {
                    string metaKey = String.Format("Tag{0}", n);
                    uploadedPhoto.Metadata.Add(metaKey, analysis.Description.Tags[n]);
                }
                await uploadedPhoto.SetMetadataAsync();
            }
            catch (Exception ex)
            {
                // Surface any storage/vision failure to the next view
                TempData["Message"] = ex.Message;
            }
        }
    }

    return RedirectToAction("Index");
}
public async Task<ActionResult> Index(List<IFormFile> imageUpload) { if (imageUpload.Count == 0) { var img = new ImageTagProcessing(); img.ImageID = 0; img.Tags = null; img.ImageURL = "../images/image.png"; return View(img); } else { string storageConnectionString = AppSettings.BloblConnectionString; if (CloudStorageAccount.TryParse(storageConnectionString, out storageAccount)) { try { // Upload Image to Blob Storage #region <<Upload to blob storage>> CloudBlobClient cloudBlobClient = storageAccount.CreateCloudBlobClient(); cloudBlobContainer = cloudBlobClient.GetContainerReference(AppSettings.BloblContainer); var filePath = imageUpload[0].FileName; using (var stream = new FileStream(filePath, FileMode.Create)) { await imageUpload[0].CopyToAsync(stream); } CloudBlockBlob cloudBlockBlob = cloudBlobContainer.GetBlockBlobReference(filePath); await cloudBlockBlob.UploadFromFileAsync(filePath); // Remove local file System.IO.File.Delete(filePath); #endregion #region <<Call Cognitive Service andGet Tags>> ComputerVisionClient vision = new ComputerVisionClient( new ApiKeyServiceClientCredentials(AppSettings.CognitiveServiceKey), new System.Net.Http.DelegatingHandler[] { }); vision.Endpoint = AppSettings.CongnitiveEndPoint; VisualFeatureTypes[] features = new VisualFeatureTypes[] { VisualFeatureTypes.Tags }; var result = await vision.AnalyzeImageAsync(cloudBlockBlob.Uri.ToString(), features); List<string> s = new List<string>(); string CommaseperatedValues = ""; for (int i = 0; i < result.Tags.Count; i++) { if (CommaseperatedValues != "") { CommaseperatedValues += ","; } CommaseperatedValues += result.Tags[i].Name.ToString(); string key = String.Format("Tag{0}", i); s.Add(result.Tags[i].Name.ToString()); cloudBlockBlob.Metadata.Add(key, result.Tags[i].Name.ToString()); } #endregion # region <<store image details in SQL table>> int imgID = 0; Image dbimg = new Image(); dbimg.Name = filePath; dbimg.Path = cloudBlockBlob.Uri.ToString(); dbimg.Tags = CommaseperatedValues; 
dbimg.ExternalKey = ""; dbimg.CreateDate = System.DateTime.Now; dbimg.LastModified = null; db.Add(dbimg); db.SaveChanges(); imgID = db.Image.OrderByDescending(u => u.ImageId).FirstOrDefault().ImageId; #endregion #region <<Prepare Model to return>> ImageTagProcessing img = new ImageTagProcessing(); img.ImageID = imgID; img.Tags = s; img.ImageURL = cloudBlockBlob.Uri.ToString(); #endregion return View(img); }