Example #1
0
        /// <summary>
        /// Decodes an image, applies a grayscale filter to its pixels, and
        /// re-encodes the result as a JPEG.
        /// </summary>
        /// <param name="imageBytes">The encoded source image.</param>
        /// <param name="height">Pixel height of the image — assumed to match the decoded image; TODO confirm against callers.</param>
        /// <param name="width">Pixel width of the image — assumed to match the decoded image; TODO confirm against callers.</param>
        /// <returns>The grayscale image as JPEG-encoded bytes.</returns>
        public async Task<byte[]> ConvertToGrayScale(byte[] imageBytes, int height, int width)
        {
            using (InMemoryRandomAccessStream rasStream = new InMemoryRandomAccessStream())
            {
                await rasStream.WriteAsync(imageBytes.AsBuffer());
                var decoder = await BitmapDecoder.CreateAsync(rasStream);
                var pixelData = await decoder.GetPixelDataAsync();
                var pixels = pixelData.DetachPixelData();

                // Lazily create the filter on first use.
                if (_filter == null)
                    _filter = new ImageFilter();

                await _filter.ToGrayScale(pixels.AsBuffer());

                // BUG FIX: the original code created the encoder on rasStream without
                // rewinding or truncating it, so the JPEG was appended after the source
                // bytes and the returned array contained both images. Encode into a
                // fresh stream instead.
                using (InMemoryRandomAccessStream outStream = new InMemoryRandomAccessStream())
                {
                    BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, outStream);
                    encoder.SetPixelData(decoder.BitmapPixelFormat, BitmapAlphaMode.Ignore, (uint)width, (uint)height, decoder.DpiX, decoder.DpiY, pixels);
                    await encoder.FlushAsync();

                    // Rewind before reading back the encoded bytes.
                    outStream.Seek(0);
                    using (BinaryReader br = new BinaryReader(outStream.AsStreamForRead()))
                    {
                        return br.ReadBytes((int)outStream.Size);
                    }
                }
            }
        }
 /// <summary>
 /// Loads every bus icon sprite (one per size/type combination) and caches
 /// its pixel data as a BMP-encoded stream in BusIconStreams.
 /// </summary>
 public static async Task LoadImages()
 {
     for (int i = 0; i < NUM_ICON_SIZES; i++)
     {
         for (int j = 0; j < NUM_ICON_TYPES; j++)
         {
             // Build the asset name: direction arrows for 1..8, base for 0,
             // alert for 9, closed otherwise; then the size suffix (20 or 40 px).
             string postfix;
             if (j >= 1 && j <= 8)
                 postfix = "BusDirection" + ((StopDirection)j).ToString();
             else if (j == 0)
                 postfix = "BusBase";
             else
                 postfix = j == 9 ? "BusAlert" : "BusClosed";
             postfix += (i == 0 ? "20" : "40");

             var sprite = new Sprite() { ImageUri = new Uri($"ms-appx:///Assets/Icons/{postfix}.png") };
             await sprite.Load();
             sprite.Unlock();

             // Re-encode the sprite bitmap as BMP into an in-memory stream.
             // CLEANUP: removed commented-out experiments and a no-op comparison
             // loop that only advanced the cached stream's read position.
             InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream();
             await sprite.Bitmap.ToStream(stream, BitmapEncoder.BmpEncoderId);

             // Rewind so consumers of the cached stream read from the start.
             stream.Seek(0);
             BusIconStreams[i * NUM_ICON_TYPES + j] = stream;
         }
     }
 }
 /// <summary>
 /// Encodes raw bitmap data into a compressed in-memory image stream.
 /// </summary>
 /// <param name="data">Raw pixel data to encode.</param>
 /// <param name="w">Bitmap width in pixels.</param>
 /// <param name="h">Bitmap height in pixels.</param>
 /// <returns>A readable stream positioned at the start of the encoded image.</returns>
 public Stream CompressBitmap(byte[] data, int w, int h)
 {
     var memStream = new InMemoryRandomAccessStream();
     Encode(data, (uint)w, (uint)h, memStream);
     // BUG FIX: rewind before handing the stream out — after Encode the
     // position sits at the end, so callers would read zero bytes.
     // (Assumes Encode leaves the position at the end — TODO confirm.)
     memStream.Seek(0);
     var stream = memStream.AsStreamForRead();
     Logger($"Log Encode stream for bugfix:{memStream.Size} => {stream.Length} bytes");
     return stream;
 }
        /// <summary>
        /// Runs forever: takes a still picture with the attached camera,
        /// displays it, submits it to the Microsoft Project Oxford Face API,
        /// overlays the recognition results on the picture, then waits a few
        /// seconds so the result can be examined before starting over.
        /// </summary>
        private async Task RunControlLoopAsync()
        {
            while (true)
            {
                await UpdateStatusAsync("Taking still picture...");

                // TODO focus if possible
                //await mediaCapture.VideoDeviceController.FocusControl.FocusAsync();
                FaceResultsGrid.Children.Clear();
                CountdownProgressBar.Value = 100;
                CameraFlashStoryboard.Begin();

                using (var captureStream = new InMemoryRandomAccessStream())
                {
                    var encodingProperties = ImageEncodingProperties.CreatePng();
                    encodingProperties.Width = 320;
                    encodingProperties.Height = 200;
                    await mediaCapture.CapturePhotoToStreamAsync(encodingProperties, captureStream);

                    await UpdateStatusAsync("Displaying sample picture...");

                    captureStream.Seek(0);
                    var bitmapImage = new BitmapImage();
                    await bitmapImage.SetSourceAsync(captureStream);
                    ResultImage.Source = bitmapImage;

                    await UpdateStatusAsync("Uploading picture to Microsoft Project Oxford Face API...");
                    captureStream.Seek(0);

                    var recognizedFaces = await GetFaces(captureStream.AsStreamForRead());
                    await UpdateStatusAsync($"{recognizedFaces.Count()} face(s) found by Microsoft 'Project Oxford' Face API");

                    // Face rectangles are measured in raw-image pixels. Compute the
                    // uniform scale and the letter/pillar-box offset introduced when
                    // the image is shown inside FaceResultsGrid (whose aspect ratio
                    // matches the screen rather than the image).
                    double scaleX = FaceResultsGrid.ActualWidth / bitmapImage.PixelWidth;
                    double scaleY = FaceResultsGrid.ActualHeight / bitmapImage.PixelHeight;
                    double scale = Math.Min(scaleX, scaleY);

                    double offsetLeft = 0;
                    double offsetTop = 0;
                    if (scaleX > scaleY)
                        offsetLeft = (FaceResultsGrid.ActualWidth - scale * bitmapImage.PixelWidth) / 2;
                    else
                        offsetTop = (FaceResultsGrid.ActualHeight - scale * bitmapImage.PixelHeight) / 2;

                    foreach (var face in recognizedFaces)
                    {
                        double left = offsetLeft + scale * face.FaceRectangle.Left;
                        double top = offsetTop + scale * face.FaceRectangle.Top;
                        double height = scale * face.FaceRectangle.Height;
                        double width = scale * face.FaceRectangle.Width;

                        // Outline around the detected face.
                        FaceResultsGrid.Children.Add(new Rectangle
                        {
                            Stroke = new SolidColorBrush(Colors.Black),
                            StrokeThickness = 3,
                            HorizontalAlignment = HorizontalAlignment.Left,
                            VerticalAlignment = VerticalAlignment.Top,
                            Margin = new Thickness(left, top, 0, 0),
                            Height = height,
                            Width = width,
                        });

                        // Gender/age label, placed just above the face outline.
                        FaceResultsGrid.Children.Add(new Border
                        {
                            Background = new SolidColorBrush(Colors.Black),
                            Padding = new Thickness(5),
                            HorizontalAlignment = HorizontalAlignment.Left,
                            VerticalAlignment = VerticalAlignment.Top,
                            Margin = new Thickness(left, top - 50, 0, 0),
                            Child = new TextBlock
                            {
                                Foreground = new SolidColorBrush(Colors.White),
                                FontSize = 30,
                                Text = $"{face.Attributes.Gender}, {face.Attributes.Age}",
                            },
                        });

                        // Car recommendation, placed just below the face outline.
                        FaceResultsGrid.Children.Add(new Border
                        {
                            Background = new SolidColorBrush(Colors.Black),
                            Padding = new Thickness(5),
                            HorizontalAlignment = HorizontalAlignment.Left,
                            VerticalAlignment = VerticalAlignment.Top,
                            Margin = new Thickness(left, top + height, 0, 0),
                            Child = new TextBlock
                            {
                                Foreground = new SolidColorBrush(Colors.White),
                                FontSize = 30,
                                Text = GetCarRecommendation(face.Attributes.Gender, (int)face.Attributes.Age),
                            },
                        });
                    }
                }

                CountdownStoryboard.Begin();
                await Task.Delay(ControlLoopDelayMilliseconds);
            }
        }
        /// <summary>
        /// Toggles between capture and analysis modes. In capture mode a photo is
        /// taken, loaded into a Win2D bitmap for drawing, and sent to the Face API;
        /// otherwise the UI is reset so a new photo can be captured.
        /// (async void is acceptable here: it is a top-level event handler.)
        /// </summary>
        private async void AnalyzeButton_Click(object sender, RoutedEventArgs e)
        {
            if (processingImage)
            {
                // Ignore button presses while processing the image
                return;
            }

            if (inCaptureState)
            {
                processingImage = true;
                inCaptureState = false;

                // Make the 'Processing...' label visible
                canvasControl.Visibility = Visibility.Visible;
                AnalyzeButton.Content = "...";

                canvasControl.Invalidate();

                // BUG FIX: the stream is now disposed (it previously leaked), and an
                // unused second stream ("reencodedPhoto") was removed.
                using (var originalPhoto = new InMemoryRandomAccessStream())
                {
                    await mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), originalPhoto);
                    await originalPhoto.FlushAsync();
                    originalPhoto.Seek(0);

                    captureElement.Visibility = Visibility.Collapsed;

                    // Store the captured photo as a Win2D type for later use
                    photoCanvasBitmap = await CanvasBitmap.LoadAsync(canvasControl, originalPhoto);

                    // BUG FIX: rewind before the second read — LoadAsync consumes the
                    // stream, so DetectAsync previously received no image data.
                    originalPhoto.Seek(0);

                    // Send the photo to Project Oxford to detect the faces
                    lastCapturedFaces = await faceServiceClient.DetectAsync(originalPhoto.AsStreamForRead(), true, true, true, false);
                }

                // Force the canvasControl to be redrawn now that the photo is available
                canvasControl.Invalidate();

                processingImage = false;
                AnalyzeButton.Content = "Restart";
            }
            else
            {
                canvasControl.Visibility = Visibility.Collapsed;
                captureElement.Visibility = Visibility.Visible;
                AnalyzeButton.Content = "Capture Photo";

                photoCanvasBitmap = null;
                canvasControl.Invalidate();

                inCaptureState = true;
            }
        }
        /// <summary>
        /// Uploads a photo as a WriteableBitmap. This method converts the given bitmap to a PNG file before sending it to the server.
        /// </summary>
        /// <param name="uri">Destination URI for the upload.</param>
        /// <param name="bmp">The bitmap to encode and upload.</param>
        /// <returns>A task that completes when the upload has finished.</returns>
        public static async Task UploadImage(string uri, WriteableBitmap bmp)
        {
            byte[] pngBuffer;

            // BUG FIX: the stream is now disposed (it previously leaked), and the
            // single Read call — which is not guaranteed to fill the buffer — was
            // replaced by a loop that reads until the buffer is full or the stream ends.
            using (InMemoryRandomAccessStream memoryStream = new InMemoryRandomAccessStream())
            {
                BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.PngEncoderId, memoryStream);
                encoder.SetPixelData(BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied, (uint)bmp.PixelWidth, (uint)bmp.PixelHeight, 96, 96, bmp.PixelBuffer.ToArray());
                await encoder.FlushAsync();

                Stream stream = memoryStream.AsStreamForRead();
                pngBuffer = new byte[stream.Length];
                int offset = 0;
                while (offset < pngBuffer.Length)
                {
                    int read = stream.Read(pngBuffer, offset, pngBuffer.Length - offset);
                    if (read == 0)
                        break;
                    offset += read;
                }
            }

            await UploadImage(uri, pngBuffer);
        }
Example #7
0
        /// <summary>
        /// Encodes the specified bitmap data and outputs it to the specified
        /// <c>BinaryWriter</c>. Bitmap data should be in BGRA format.
        /// For internal use only.
        /// </summary>
        /// <param name="bytes">Raw BGRA pixel data, Width x Height pixels.</param>
        /// <param name="writer">Destination for the width/height/size header and JPEG data.</param>
        public async Task EncodeAsync(byte[] bytes, BinaryWriter writer)
        {
#if NETFX_CORE
            using (var jpegStream = new InMemoryRandomAccessStream())
            {
                var propertySet = new BitmapPropertySet();
                var qualityValue = new BitmapTypedValue(this.JpegQuality / 100.0, PropertyType.Single);
                propertySet.Add("ImageQuality", qualityValue);

                var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, jpegStream, propertySet);
                // BUG FIX: also scale when only the height differs; the width-only
                // test previously skipped the transform in that case (the WPF
                // branch below already checks both dimensions).
                if (this.Width != this.OutputWidth || this.Height != this.OutputHeight)
                {
                    encoder.BitmapTransform.ScaledWidth = (uint)this.OutputWidth;
                    encoder.BitmapTransform.ScaledHeight = (uint)this.OutputHeight;
                }

                encoder.SetPixelData(
                    BitmapPixelFormat.Bgra8,
                    BitmapAlphaMode.Straight,
                    (uint)this.Width,
                    (uint)this.Height,
                    96,
                    96,
                    bytes);
                await encoder.FlushAsync();

                if (writer.BaseStream == null || writer.BaseStream.CanWrite == false)
                    return;

                // Header
                writer.Write(this.OutputWidth);
                writer.Write(this.OutputHeight);
                writer.Write((int)jpegStream.Size);

                // BUG FIX: rewind before copying — the WPF branch resets Position
                // but this branch previously copied from the end of the stream.
                jpegStream.Seek(0);

                // Data
                jpegStream.AsStreamForRead().CopyTo(writer.BaseStream);
            }
#else
            await Task.Run(() =>
            {
                var format = PixelFormats.Bgra32;
                int stride = (int)this.Width * format.BitsPerPixel / 8;
                var bmp = BitmapSource.Create(
                    this.Width,
                    this.Height,
                    96.0,
                    96.0,
                    format,
                    null,
                    bytes,
                    stride);
                BitmapFrame frame;
                if (this.Width != this.OutputWidth || this.Height != this.OutputHeight)
                {
                    // BUG FIX: the horizontal scale factor previously used
                    // OutputHeight / Height, distorting any non-uniform resize.
                    var transform = new ScaleTransform((double)this.OutputWidth / this.Width, (double)this.OutputHeight / this.Height);
                    var scaledbmp = new TransformedBitmap(bmp, transform);
                    frame = BitmapFrame.Create(scaledbmp);
                }
                else
                {
                    frame = BitmapFrame.Create(bmp);
                }

                var encoder = new JpegBitmapEncoder()
                {
                    QualityLevel = this.JpegQuality
                };
                encoder.Frames.Add(frame);
                using (var jpegStream = new MemoryStream())
                {
                    encoder.Save(jpegStream);

                    if (writer.BaseStream == null || writer.BaseStream.CanWrite == false)
                        return;

                    // Header
                    writer.Write(this.OutputWidth);
                    writer.Write(this.OutputHeight);
                    writer.Write((int)jpegStream.Length);

                    // Data
                    jpegStream.Position = 0;
                    jpegStream.CopyTo(writer.BaseStream);
                }
            });
#endif
        }
Example #8
0
		/// <summary>
		/// Captures a frame from the camera, optionally previews it on a box
		/// floating in front of the camera, and submits it to the Computer
		/// Vision service for OCR or a description.
		/// </summary>
		/// <param name="readText">True to run OCR; false to describe the picture.</param>
		/// <returns>The recognized text or caption, or an error message on failure.</returns>
		async Task<string> CaptureAndAnalyze(bool readText = false)
		{
			var imgFormat = ImageEncodingProperties.CreateJpeg();

			//NOTE: this is how you can save a frame to the CameraRoll folder:
			//var file = await KnownFolders.CameraRoll.CreateFileAsync($"MCS_Photo{DateTime.Now:HH-mm-ss}.jpg", CreationCollisionOption.GenerateUniqueName);
			//await mediaCapture.CapturePhotoToStorageFileAsync(imgFormat, file);
			//var stream = await file.OpenStreamForReadAsync();

			// Capture a frame and put it to MemoryStream
			var memoryStream = new MemoryStream();
			using (var ras = new InMemoryRandomAccessStream())
			{
				await mediaCapture.CapturePhotoToStreamAsync(imgFormat, ras);
				ras.Seek(0);
				using (var stream = ras.AsStreamForRead())
					stream.CopyTo(memoryStream);
			}

			// Keep a raw copy for the preview texture, then rewind for the API call.
			var imageBytes = memoryStream.ToArray();
			memoryStream.Position = 0;

			if (withPreview)
			{
				// Show the captured frame as a textured box in the 3D scene.
				InvokeOnMain(() =>
					{
						var image = new Image();
						image.Load(new Urho.MemoryBuffer(imageBytes));

						Node child = Scene.CreateChild();
						child.Position = LeftCamera.Node.WorldPosition + LeftCamera.Node.WorldDirection * 2f;
						child.LookAt(LeftCamera.Node.WorldPosition, Vector3.Up, TransformSpace.World);

						// Preserve the frame's aspect ratio on the box.
						child.Scale = new Vector3(1f, image.Height / (float)image.Width, 0.1f) / 10;
						var texture = new Texture2D();
						texture.SetData(image, true);

						var material = new Material();
						material.SetTechnique(0, CoreAssets.Techniques.Diff, 0, 0);
						material.SetTexture(TextureUnit.Diffuse, texture);

						var box = child.CreateComponent<Box>();
						box.SetMaterial(material);

						child.RunActions(new EaseBounceOut(new ScaleBy(1f, 5)));
					});
			}

			try
			{
				var client = new VisionServiceClient(VisionApiKey);
				if (readText)
				{
					var ocrResult = await client.RecognizeTextAsync(memoryStream, detectOrientation: false);
					var words = ocrResult.Regions.SelectMany(region => region.Lines).SelectMany(line => line.Words).Select(word => word.Text);
					return "it says: " + string.Join(" ", words);
				}
				else
				{
					// just describe the picture, you can also use client.AnalyzeImageAsync method to get more info
					var result = await client.DescribeAsync(memoryStream);
					return result?.Description?.Captions?.FirstOrDefault()?.Text;
				}
			}
			catch (ClientException exc)
			{
				return exc?.Error?.Message ?? "Failed";
			}
			catch (Exception)
			{
				// Best-effort: any other failure is reported as a generic message.
				// (Unused exception variable removed to silence CS0168.)
				return "Failed";
			}
		}
        /// <summary>
        /// Captures a photo, runs it through the published Custom Vision model
        /// (classification or detection), logs the predictions, and sends the
        /// tags above the probability threshold to Azure IoT Hub as telemetry.
        /// </summary>
        /// <param name="isCommand">True when triggered by a command rather than the sensor.</param>
        private async Task ImageUpdate(bool isCommand)
        {
            DateTime currentTime = DateTime.UtcNow;

            // Just incase - stop code being called while photo already in progress
            if (this.cameraBusy)
            {
                return;
            }
            this.cameraBusy = true;
            this.displayGpioPin.Write(GpioPinValue.High);

            // Check that enough time has passed for picture to be taken
            if ((currentTime - this.imageLastCapturedAtUtc) < this.debounceTimeout)
            {
                this.displayOffTimer.Change(this.timerPeriodDetectIlluminated, this.timerPeriodInfinite);
                // BUG FIX: the busy flag was previously left set on this early
                // return, permanently blocking every later capture. (The display
                // pin is left high here — presumably displayOffTimer lowers it;
                // TODO confirm the timer callback's behavior.)
                this.cameraBusy = false;
                return;
            }

            this.imageLastCapturedAtUtc = currentTime;

            try
            {
                ImagePrediction imagePrediction;

                using (Windows.Storage.Streams.InMemoryRandomAccessStream captureStream = new Windows.Storage.Streams.InMemoryRandomAccessStream())
                {
                    // BUG FIX: await the async capture/flush instead of blocking with
                    // .AsTask().Wait(), which risks deadlock in an async method and
                    // ties up a thread-pool thread.
                    await this.mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), captureStream);
                    await captureStream.FlushAsync();
                    captureStream.Seek(0);

                    // NOTE(review): the photo is captured a second time to a file —
                    // presumably for local inspection; confirm the duplication is intended.
                    IStorageFile photoFile = await KnownFolders.PicturesLibrary.CreateFileAsync(ImageFilename, CreationCollisionOption.ReplaceExisting);

                    ImageEncodingProperties imageProperties = ImageEncodingProperties.CreateJpeg();
                    await this.mediaCapture.CapturePhotoToStorageFileAsync(imageProperties, photoFile);

                    // Run the capture through the published Custom Vision model.
                    switch (modelType)
                    {
                    case ModelType.Classification:
                        imagePrediction = await this.customVisionClient.ClassifyImageAsync(this.projectId, this.modelPublishedName, captureStream.AsStreamForRead());
                        break;

                    case ModelType.Detection:
                        imagePrediction = await this.customVisionClient.DetectImageAsync(this.projectId, this.modelPublishedName, captureStream.AsStreamForRead());
                        break;

                    default:
                        throw new ArgumentException("ModelType Invalid");
                    }
                    Debug.WriteLine($"Prediction count {imagePrediction.Predictions.Count}");
                }

                JObject       telemetryDataPoint = new JObject();
                LoggingFields imageInformation   = new LoggingFields();

                imageInformation.AddDateTime("TakenAtUTC", currentTime);
                imageInformation.AddBoolean("IsCommand", isCommand);
                imageInformation.AddDouble("Probability threshold", probabilityThreshold);
                imageInformation.AddInt32("Predictions", imagePrediction.Predictions.Count);

                // Display and log the results of the prediction
                foreach (var prediction in imagePrediction.Predictions)
                {
                    Debug.WriteLine($" Tag:{prediction.TagName} {prediction.Probability}");
                    imageInformation.AddDouble($"Tag:{prediction.TagName}", prediction.Probability);
                }

                // Post process the predictions based on the type of model
                switch (modelType)
                {
                case ModelType.Classification:
                    // Use only the tags above the specified minimum probability
                    foreach (var prediction in imagePrediction.Predictions)
                    {
                        if (prediction.Probability >= probabilityThreshold)
                        {
                            // Display and log the individual tag probabilities
                            Debug.WriteLine($" Tag valid:{prediction.TagName} {prediction.Probability:0.00}");
                            imageInformation.AddDouble($"Tag valid:{prediction.TagName}", prediction.Probability);

                            telemetryDataPoint.Add(prediction.TagName, prediction.Probability);
                        }
                    }
                    break;

                case ModelType.Detection:
                    // Group the tags to get the count, include only the predictions above the specified minimum probability
                    var groupedPredictions = from prediction in imagePrediction.Predictions
                                             where prediction.Probability >= probabilityThreshold
                                             group prediction by new { prediction.TagName }
                    into newGroup
                        select new
                    {
                        TagName = newGroup.Key.TagName,
                        Count   = newGroup.Count(),
                    };

                    // Display and log the aggregated predictions
                    foreach (var prediction in groupedPredictions)
                    {
                        Debug.WriteLine($" Tag valid:{prediction.TagName} {prediction.Count}");
                        imageInformation.AddInt32($"Tag valid:{prediction.TagName}", prediction.Count);
                        telemetryDataPoint.Add(prediction.TagName, prediction.Count);
                    }
                    break;

                default:
                    throw new ArgumentException("ModelType Invalid");
                }

                this.logging.LogEvent("Captured image processed by Cognitive Services", imageInformation);

                try
                {
                    using (Message message = new Message(Encoding.ASCII.GetBytes(JsonConvert.SerializeObject(telemetryDataPoint))))
                    {
                        Debug.WriteLine(" {0:HH:mm:ss} AzureIoTHubClient SendEventAsync start", DateTime.UtcNow);
                        await this.azureIoTHubClient.SendEventAsync(message);

                        Debug.WriteLine(" {0:HH:mm:ss} AzureIoTHubClient SendEventAsync finish", DateTime.UtcNow);
                    }
                    this.logging.LogEvent("SendEventAsync payload", imageInformation, LoggingLevel.Information);
                }
                catch (Exception ex)
                {
                    imageInformation.AddString("Exception", ex.ToString());
                    this.logging.LogEvent("SendEventAsync payload", imageInformation, LoggingLevel.Error);
                }
            }
            catch (Exception ex)
            {
                this.logging.LogMessage("Camera photo or save failed " + ex.Message, LoggingLevel.Error);
            }
            finally
            {
                this.displayGpioPin.Write(GpioPinValue.Low);
                this.cameraBusy = false;
            }
        }
        /// <summary>
        /// Takes a photo with the camera and uploads it to Azure blob storage via
        /// the IoT Hub client, once under a "latest" name and once under a
        /// timestamped history name.
        /// </summary>
        /// <param name="isCommand">True when triggered by a command rather than a sensor event.</param>
        private async Task ImageUpdate(bool isCommand)
        {
            DateTime captureTimeUtc = DateTime.UtcNow;

            // Guard against re-entrancy while a capture is already in flight.
            if (this.cameraBusy)
            {
                return;
            }
            this.cameraBusy = true;

            try
            {
                using (Windows.Storage.Streams.InMemoryRandomAccessStream photoStream = new Windows.Storage.Streams.InMemoryRandomAccessStream())
                {
                    await this.mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), photoStream);

                    await photoStream.FlushAsync();

#if DEBUG
                    IStorageFile photoFile = await KnownFolders.PicturesLibrary.CreateFileAsync(ImageFilenameLocal, CreationCollisionOption.ReplaceExisting);

                    ImageEncodingProperties imageProperties = ImageEncodingProperties.CreateJpeg();
                    await this.mediaCapture.CapturePhotoToStorageFileAsync(imageProperties, photoFile);
#endif

                    // Blob names come from configurable format strings keyed on the capture time.
                    string azureFilenameLatest  = string.Format(this.azureStorageimageFilenameLatestFormat, captureTimeUtc);
                    string azureFilenameHistory = string.Format(this.azureStorageImageFilenameHistoryFormat, captureTimeUtc);

                    LoggingFields imageInformation = new LoggingFields();
                    imageInformation.AddDateTime("TakenAtUTC", captureTimeUtc);
                    imageInformation.AddBoolean("IsCommand", isCommand);
#if DEBUG
                    imageInformation.AddString("LocalFilename", photoFile.Path);
#endif
                    imageInformation.AddString("AzureFilenameLatest", azureFilenameLatest);
                    imageInformation.AddString("AzureFilenameHistory", azureFilenameHistory);
                    this.logging.LogEvent("Saving image(s) to Azure storage", imageInformation);

                    // Rewinds the capture stream and uploads it under the given blob name.
                    async Task UploadAsync(string blobName, string label)
                    {
                        photoStream.Seek(0);
                        Debug.WriteLine($"AzureIoT Hub {label} image upload start");
                        await this.azureIoTHubClient.UploadToBlobAsync(blobName, photoStream.AsStreamForRead());

                        Debug.WriteLine($"AzureIoT Hub {label} image upload done");
                    }

                    // Update the latest image in storage
                    if (!string.IsNullOrWhiteSpace(azureFilenameLatest))
                    {
                        await UploadAsync(azureFilenameLatest, "latest");
                    }

                    // Upload the historic image to storage
                    if (!string.IsNullOrWhiteSpace(azureFilenameHistory))
                    {
                        await UploadAsync(azureFilenameHistory, "historic");
                    }
                }
            }
            catch (Exception ex)
            {
                this.logging.LogMessage("Image capture or AzureIoTHub storage upload failed " + ex.Message, LoggingLevel.Error);
            }
            finally
            {
                this.cameraBusy = false;
            }
        }