Example No. 1
        async Task <bool> RecognizeFace()
        {
            Log.Trace($"BEFORE {GetType().Name}.RecognizeFace()", Log.LogFlag.Debug);
            long currMilliseconds = DateTimeOffset.Now.ToUnixTimeMilliseconds();

            if (currMilliseconds - 1000L < Interlocked.Exchange(ref lastRecognizingTimeStampMillis, currMilliseconds))
            {
                Log.Trace($"IN {GetType().Name}.RecognizeFace() BEFORE will wait", Log.LogFlag.Debug);
                await Task.Delay(TimeSpan.FromMilliseconds(1000));

                Interlocked.Exchange(ref lastRecognizingTimeStampMillis, DateTimeOffset.Now.ToUnixTimeMilliseconds());
                Log.Trace($"IN {GetType().Name}.RecognizeFace() AFTER will wait", Log.LogFlag.Debug);
            }

            if (!IsFacePresent)
            {
                Log.Trace($"AFTER {GetType().Name}.RecognizeFace(): IsFacePresent='false'", Log.LogFlag.Debug);
                return(false);
            }

            Log.Trace($"IN {GetType().Name}.RecognizeFace() BEFORE check Event state", Log.LogFlag.Debug);
            if (RE.State.ContainsKey("Event"))
            {
                if (RE.State["Event"] == "FacePreOut")
                {
                    RE.SetVar("Event", "FaceIn");
                }
            }
            Log.Trace($"IN {GetType().Name}.RecognizeFace() AFTER check Event state", Log.LogFlag.Debug);

            FaceWaitTimer.Stop();

            var photoAsStream = new MemoryStream();

            Log.Trace($"IN {GetType().Name}.RecognizeFace() BEFORE capture a photo", Log.LogFlag.Debug);
            await MC.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), photoAsStream.AsRandomAccessStream());

            Log.Trace($"IN {GetType().Name}.RecognizeFace() AFTER capture a photo, BEFORE serialized", Log.LogFlag.Debug);
            byte[] photoAsByteArray = photoAsStream.ToArray();
            Log.Trace($"IN {GetType().Name}.RecognizeFace() AFTER capture a photo, AFTER serialized", Log.LogFlag.Debug);


            Log.Trace($"IN {GetType().Name}.RecognizeFace() BEFORE ProcessPhotoAsync()", Log.LogFlag.Debug);
            PhotoInfoDTO photoInfo = await ProcessPhotoAsync(photoAsByteArray, Config.RecognizeEmotions);

            Log.Trace($"IN {GetType().Name}.RecognizeFace() AFTER ProcessPhotoAsync()", Log.LogFlag.Debug);
            if (photoInfo.FoundAndProcessedFaces)
            {
                if (photoInfo.Age == "offline")
                {
                    RE.SetVar("FaceCount", "1");
                    RE.SetVar("Gender", "offline");
                    RE.SetVar("Age", "-1");
                    RE.SetVar("Emotion", "offline");
                    RE.SetVar("offline", "True");
                }
                else
                {
                    RE.SetVar("FaceCount", photoInfo.FaceCountAsString);
                    RE.SetVar("Gender", photoInfo.Gender);
                    RE.SetVar("Age", photoInfo.Age);
                    if (Config.RecognizeEmotions)
                    {
                        RE.SetVar("Emotion", photoInfo.Emotion);
                    }
                }

                Log.Trace($"AFTER {GetType().Name}.RecognizeFace(): FaceCount='{RE.State.Eval("FaceCount")}', " +
                          $"Age='{RE.State.Eval("Age")}', Gender='{RE.State.Eval("Gender")}', Emotion='{RE.State.Eval("Emotion")}'",
                          Log.LogFlag.Debug);
                return(true);
            }
            else
            {
                FaceWaitTimer.Start();
                Log.Trace($"AFTER {GetType().Name}.RecognizeFace(): FaceCount='0'", Log.LogFlag.Debug);
                return(false);
            }
        }
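
The guard at the top of RecognizeFace() is a simple rate limiter: Interlocked.Exchange swaps in the current timestamp and returns the previous one, and if less than a second has elapsed the method waits before continuing. A minimal sketch of the same pattern in isolation (the field and method names are illustrative, not from the original):

        // Sketch only: throttle a recurring operation to one run per interval.
        private long lastCallTimeStampMillis;

        async Task ThrottleAsync(int minimumIntervalMilliseconds = 1000)
        {
            long now = DateTimeOffset.Now.ToUnixTimeMilliseconds();
            long previous = Interlocked.Exchange(ref lastCallTimeStampMillis, now);

            if (now - previous < minimumIntervalMilliseconds)
            {
                // Too soon since the last call; wait and refresh the timestamp.
                await Task.Delay(minimumIntervalMilliseconds);
                Interlocked.Exchange(ref lastCallTimeStampMillis, DateTimeOffset.Now.ToUnixTimeMilliseconds());
            }
        }
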
        private async Task ImageUpdate(bool isCommand)
        {
            DateTime currentTime = DateTime.UtcNow;

            // Just in case - stop the code being called while a photo is already in progress
            if (this.cameraBusy)
            {
                return;
            }
            this.cameraBusy = true;
            this.displayGpioPin.Write(GpioPinValue.High);

            // Check that enough time has passed for a picture to be taken
            if ((currentTime - this.imageLastCapturedAtUtc) < this.debounceTimeout)
            {
                this.displayOffTimer.Change(this.timerPeriodDetectIlluminated, this.timerPeriodInfinite);
                // Release the busy flag before the early return so later calls are not blocked
                this.cameraBusy = false;
                return;
            }

            this.imageLastCapturedAtUtc = currentTime;

            try
            {
                ImagePrediction imagePrediction;

                using (Windows.Storage.Streams.InMemoryRandomAccessStream captureStream = new Windows.Storage.Streams.InMemoryRandomAccessStream())
                {
                    this.mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), captureStream).AsTask().Wait();
                    captureStream.FlushAsync().AsTask().Wait();
                    captureStream.Seek(0);

                    IStorageFile photoFile = await KnownFolders.PicturesLibrary.CreateFileAsync(ImageFilename, CreationCollisionOption.ReplaceExisting);

                    ImageEncodingProperties imageProperties = ImageEncodingProperties.CreateJpeg();
                    await this.mediaCapture.CapturePhotoToStorageFileAsync(imageProperties, photoFile);

                    switch (modelType)
                    {
                    case ModelType.Classification:
                        imagePrediction = await this.customVisionClient.ClassifyImageAsync(this.projectId, this.modelPublishedName, captureStream.AsStreamForRead());

                        break;

                    case ModelType.Detection:
                        imagePrediction = await this.customVisionClient.DetectImageAsync(this.projectId, this.modelPublishedName, captureStream.AsStreamForRead());

                        break;

                    default:
                        throw new ArgumentException("ModelType Invalid");
                    }
                    Debug.WriteLine($"Prediction count {imagePrediction.Predictions.Count}");
                }

                JObject       telemetryDataPoint = new JObject();
                LoggingFields imageInformation   = new LoggingFields();

                imageInformation.AddDateTime("TakenAtUTC", currentTime);
                imageInformation.AddBoolean("IsCommand", isCommand);
                imageInformation.AddDouble("Probability threshold", probabilityThreshold);
                imageInformation.AddInt32("Predictions", imagePrediction.Predictions.Count);

                // Display and log the results of the prediction
                foreach (var prediction in imagePrediction.Predictions)
                {
                    Debug.WriteLine($" Tag:{prediction.TagName} {prediction.Probability}");
                    imageInformation.AddDouble($"Tag:{prediction.TagName}", prediction.Probability);
                }

                // Post process the predictions based on the type of model
                switch (modelType)
                {
                case ModelType.Classification:
                    // Use only the tags above the specified minimum probability
                    foreach (var prediction in imagePrediction.Predictions)
                    {
                        if (prediction.Probability >= probabilityThreshold)
                        {
                            // Display and log the individual tag probabilities
                            Debug.WriteLine($" Tag valid:{prediction.TagName} {prediction.Probability:0.00}");
                            imageInformation.AddDouble($"Tag valid:{prediction.TagName}", prediction.Probability);

                            telemetryDataPoint.Add(prediction.TagName, prediction.Probability);
                        }
                    }
                    break;

                case ModelType.Detection:
                    // Group the tags to get the count, include only the predictions above the specified minimum probability
                    var groupedPredictions = from prediction in imagePrediction.Predictions
                                             where prediction.Probability >= probabilityThreshold
                                             group prediction by new { prediction.TagName } into newGroup
                                             select new
                                             {
                                                 TagName = newGroup.Key.TagName,
                                                 Count   = newGroup.Count(),
                                             };

                    // Display and log the aggregated predictions
                    foreach (var prediction in groupedPredictions)
                    {
                        Debug.WriteLine($" Tag valid:{prediction.TagName} {prediction.Count}");
                        imageInformation.AddInt32($"Tag valid:{prediction.TagName}", prediction.Count);
                        telemetryDataPoint.Add(prediction.TagName, prediction.Count);
                    }
                    break;

                default:
                    throw new ArgumentException("ModelType Invalid");
                }

                this.logging.LogEvent("Captured image processed by Cognitive Services", imageInformation);

                try
                {
                    using (Message message = new Message(Encoding.ASCII.GetBytes(JsonConvert.SerializeObject(telemetryDataPoint))))
                    {
                        Debug.WriteLine(" {0:HH:mm:ss} AzureIoTHubClient SendEventAsync start", DateTime.UtcNow);
                        await this.azureIoTHubClient.SendEventAsync(message);

                        Debug.WriteLine(" {0:HH:mm:ss} AzureIoTHubClient SendEventAsync finish", DateTime.UtcNow);
                    }
                    this.logging.LogEvent("SendEventAsync payload", imageInformation, LoggingLevel.Information);
                }
                catch (Exception ex)
                {
                    imageInformation.AddString("Exception", ex.ToString());
                    this.logging.LogEvent("SendEventAsync payload", imageInformation, LoggingLevel.Error);
                }
            }
            catch (Exception ex)
            {
                this.logging.LogMessage("Camera photo or save failed " + ex.Message, LoggingLevel.Error);
            }
            finally
            {
                this.displayGpioPin.Write(GpioPinValue.Low);
                this.cameraBusy = false;
            }
        }
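
The capture above blocks on .AsTask().Wait(), which works on a thread-pool thread but can deadlock if the method is ever called from the UI thread; the second ImageUpdate example later on this page awaits the same calls instead. A minimal awaited sketch of the capture step, assuming the same mediaCapture field:

            // Sketch only: fully awaited capture, no blocking Wait() calls.
            using (InMemoryRandomAccessStream captureStream = new InMemoryRandomAccessStream())
            {
                await this.mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), captureStream);
                await captureStream.FlushAsync();
                captureStream.Seek(0);

                // captureStream.AsStreamForRead() can now be handed to the Custom Vision client.
            }
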
Example No. 3
        // scan the picture every x seconds
        private async void Timer_Tick(object sender, object e)
        {
            timer.Stop();
            textBlockSessionInfo.Text = "point @ QR code";

            ImageEncodingProperties imgFormat = ImageEncodingProperties.CreateJpeg();
            // create storage file in local app storage
            StorageFile file = await ApplicationData.Current.LocalFolder.CreateFileAsync(
                "temp.jpg",
                CreationCollisionOption.GenerateUniqueName);

            //await camera.AutoFocus();

            // take photo
            await camera.captureMgr.CapturePhotoToStorageFileAsync(imgFormat, file);

            // Get photo as a BitmapImage
            BitmapImage bmpImage = new BitmapImage(new Uri(file.Path));

            bmpImage.CreateOptions = BitmapCreateOptions.IgnoreImageCache;

            WriteableBitmap wrb;

            ZXing.BarcodeReader br;
            Result res;

            using (IRandomAccessStream fileStream = await file.OpenAsync(FileAccessMode.Read))
            {
                wrb = await Windows.UI.Xaml.Media.Imaging.BitmapFactory.New(1, 1).FromStream(fileStream);
            }

            br = new BarcodeReader {
                PossibleFormats = new BarcodeFormat[] { BarcodeFormat.QR_CODE }
            };
            br.AutoRotate        = true;
            br.Options.TryHarder = true;


            res = br.Decode(wrb.ToByteArray(), wrb.PixelWidth, wrb.PixelHeight, RGBLuminanceSource.BitmapFormat.RGBA32);

            if (res != null)
            {
                try
                {
                    qrdata = Newtonsoft.Json.JsonConvert.DeserializeObject <QRCodeData>(res.Text);
                    textBoxSessionId.Text = qrdata.uuid;

                    textBlockSessionInfo.Text      = qrdata.text;
                    textBlockSessionInfoExtra.Text = res.Text;

                    await camera.captureMgr.StopPreviewAsync();

                    capturePreview.Visibility = Visibility.Collapsed;
                }
                catch (Exception jsonEx)
                {
                    textBoxSessionId.Text = "error " + jsonEx.Message;
                    timer.Start();
                }
            }
            else
            {
                timer.Start();
            }
        }
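
ZXing's BarcodeReader can also decode a WriteableBitmap directly (Example No. 13 below does this), which avoids passing the width and height by hand and sidesteps the kind of mix-up fixed above. A sketch reusing the same wrb and the option names shown elsewhere on this page:

            // Sketch only: let ZXing read the dimensions from the bitmap itself.
            var reader = new BarcodeReader
            {
                AutoRotate = true,
                Options = new DecodingOptions
                {
                    PossibleFormats = new[] { BarcodeFormat.QR_CODE },
                    TryHarder = true
                }
            };
            Result result = reader.Decode(wrb);
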
Example No. 4
        /// <summary>
        /// Captures a photo and sends it off for analysis
        /// </summary>
        private async void TakePhoto()
        {
            try
            {
                lowLagCapture =
                    await MediaCaptureElement.PrepareLowLagPhotoCaptureAsync(ImageEncodingProperties.CreateUncompressed(MediaPixelFormat.Bgra8));

                CapturedPhoto capturedPhoto = await lowLagCapture.CaptureAsync();

                SoftwareBitmap softwareBitmap = capturedPhoto.Frame.SoftwareBitmap;

                await lowLagCapture.FinishAsync();

                byte[] imageBytes = new byte[4 * softwareBitmap.PixelWidth * softwareBitmap.PixelHeight];
                softwareBitmap.CopyToBuffer(imageBytes.AsBuffer());

                bool isAlert = CheckForMotion(imageBytes);

                if (isAlert)
                {
                    WriteableBitmap writeableBitmap = new WriteableBitmap(softwareBitmap.PixelWidth, softwareBitmap.PixelHeight);
                    softwareBitmap.CopyToBuffer(writeableBitmap.PixelBuffer);
                    AlertDisplayImages.Add(new AlertDisplayImageModel()
                    {
                        AlertDisplayImage = writeableBitmap, AlertDisplayCaption = DateTime.Now.ToString()
                    });

                    delayTimer.Interval = new TimeSpan(0, 0, ConfigurationSettings.AppConfig.AlertDelay);

                    captureTimer.Stop();
                    delayTimer.Tick += OnDelayTimerTick;
                    delayTimer.Start();

                    // It seems silly that we need to capture a second image, but the first image that was captured isn't in a format that can
                    // be easily emailed. That being the case, I decided it'd just be easier to grab another capture in the correct format and
                    // email it off. The delta between the images is negligible
                    var stream = new InMemoryRandomAccessStream();
                    await MediaCaptureElement.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), stream);

                    await Task.Delay(10);

                    streamList.Add(stream);

                    if (AutoSaveAlertImages)
                    {
                    }

                    if (ConfigurationSettings.AppConfig.SendEmails && streamList.Count > ConfigurationSettings.AppConfig.AlertThreshold)
                    {
                        captureTimer.Stop();
                        await SendAlertEmail(streamList);

                        await Task.Delay(new TimeSpan(0, 1, 0));
                    }
                }
            }
            catch (Exception error)
            {
                // Getting random COM errors. Just eat it and continue. There's nothing I can do about this.
            }
        }
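
The comment above explains that the second photo exists only because the first capture is not in an easily emailed format. An alternative worth noting, offered purely as a sketch and not part of the original sample, is to encode the SoftwareBitmap already in hand to JPEG with BitmapEncoder, avoiding the extra capture:

            // Sketch only: convert the captured frame to a JPEG stream without a second capture.
            SoftwareBitmap convertedBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
            var jpegStream = new InMemoryRandomAccessStream();
            BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, jpegStream);
            encoder.SetSoftwareBitmap(convertedBitmap);
            await encoder.FlushAsync();
            jpegStream.Seek(0);
            // jpegStream could then be added to streamList in place of the second capture.
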
Example No. 5
        /// <summary>
        /// Retrieves the count of detected faces
        /// </summary>
        /// <param name="faces">The list of detected faces from the FaceDetected event of the effect</param>
        private async void CountDetectedFaces(IReadOnlyList <DetectedFace> faces)
        {
            FaceCount = $"{_detectionString} {faces.Count.ToString()}";

            // If we detect any faces, kill our no faces timer
            if (faces.Count != 0)
            {
                if (_noFacesTimer != null)
                {
                    _noFacesTimer.Dispose();
                }
            }
            // Otherwise, if we are filtering and don't have a timer
            else if (_currentlyFiltered && (_noFacesTimer == null))
            {
                // Create a callback
                TimerCallback noFacesCallback = (object stateInfo) =>
                {
                    _dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                    {
                        OnFilterOnFace(_unfilteredName);
                        _noFacesTimer = null;
                    });
                    _noFacesTimer.Dispose();
                };

                // Set our timer
                _noFacesTimer = new Timer(noFacesCallback, null, NoFacesTime, Timeout.Infinite);
            }

            // We are also going to take an image the first time that we detect exactly one face.
            // Side note - to avoid a race condition, I had to use a boolean. Just checking for _faceCaptureStill == null could produce an error.
            if ((faces.Count == 1) && !_holdForTimer && !_currentlyFiltered)
            {
                // Kick off the timer so we don't keep taking pictures, but will resubmit if we are not filtered
                _holdForTimer = true;

                // Take the picture
                _faceCaptureStill = await ApplicationData.Current.LocalFolder.CreateFileAsync("FaceDetected.jpg", CreationCollisionOption.ReplaceExisting);

                await _mediaCapture.CapturePhotoToStorageFileAsync(ImageEncodingProperties.CreateJpeg(), _faceCaptureStill);


                if (((App)Application.Current).AppSettings.FaceApiKey != "" && FacialSimilarity.InitialTrainingPerformed)
                {
                    var UserName = await FacialSimilarity.CheckForUserAsync(new Uri("ms-appdata:///local/FaceDetected.jpg"));

                    if (UserName != "")
                    {
                        OnFilterOnFace(UserName);
                    }
                }

                // Allow the camera to take another picture in 10 seconds
                TimerCallback callback = (Object stateInfo) =>
                {
                    // Now that the timer is expired, we no longer need to hold
                    // Nothing else to do since the timer will be restarted when the picture is taken
                    _holdForTimer = false;
                    if (_pictureTimer != null)
                    {
                        _pictureTimer.Dispose();
                    }
                };
                _pictureTimer = new Timer(callback, null, 10000, Timeout.Infinite);
            }
        }
Example No. 6
        async Task <string> CaptureAndAnalyze(bool readText = false)
        {
            var imgFormat = ImageEncodingProperties.CreateJpeg();

            //NOTE: this is how you can save a frame to the CameraRoll folder:
            //var file = await KnownFolders.CameraRoll.CreateFileAsync($"MCS_Photo{DateTime.Now:HH-mm-ss}.jpg", CreationCollisionOption.GenerateUniqueName);
            //await mediaCapture.CapturePhotoToStorageFileAsync(imgFormat, file);
            //var stream = await file.OpenStreamForReadAsync();

            // Capture a frame and put it to MemoryStream
            var memoryStream = new MemoryStream();

            using (var ras = new InMemoryRandomAccessStream())
            {
                await mediaCapture.CapturePhotoToStreamAsync(imgFormat, ras);

                ras.Seek(0);
                using (var stream = ras.AsStreamForRead())
                    stream.CopyTo(memoryStream);
            }

            var imageBytes = memoryStream.ToArray();

            memoryStream.Position = 0;

            if (withPreview)
            {
                InvokeOnMain(() =>
                {
                    var image = new Image();
                    image.Load(new Urho.MemoryBuffer(imageBytes));

                    Node child     = Scene.CreateChild();
                    child.Position = LeftCamera.Node.WorldPosition + LeftCamera.Node.WorldDirection * 2f;
                    child.LookAt(LeftCamera.Node.WorldPosition, Vector3.Up, TransformSpace.World);

                    child.Scale = new Vector3(1f, image.Height / (float)image.Width, 0.1f) / 10;
                    var texture = new Texture2D();
                    texture.SetData(image, true);

                    var material = new Material();
                    material.SetTechnique(0, CoreAssets.Techniques.Diff, 0, 0);
                    material.SetTexture(TextureUnit.Diffuse, texture);

                    var box = child.CreateComponent <Box>();
                    box.SetMaterial(material);

                    child.RunActions(new EaseBounceOut(new ScaleBy(1f, 5)));
                });
            }

            try
            {
                var client = new VisionServiceClient(VisionApiKey);
                if (readText)
                {
                    var ocrResult = await client.RecognizeTextAsync(memoryStream, detectOrientation: false);

                    var words = ocrResult.Regions.SelectMany(region => region.Lines).SelectMany(line => line.Words).Select(word => word.Text);
                    return("it says: " + string.Join(" ", words));
                }
                else
                {
                    // just describe the picture; you can also use the client.AnalyzeImageAsync method to get more info
                    var result = await client.DescribeAsync(memoryStream);

                    return(result?.Description?.Captions?.FirstOrDefault()?.Text);
                }
            }
            catch (ClientException exc)
            {
                return(exc?.Error?.Message ?? "Failed");
            }
            catch (Exception exc)
            {
                return("Failed");
            }
        }
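
A call site for the method above might look like this (hypothetical usage):

            // Sketch only: describe the scene, or pass readText: true to run OCR instead.
            string description = await CaptureAndAnalyze();
            string recognizedText = await CaptureAndAnalyze(readText: true);
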
Example No. 7
    protected override async void OnNavigatedTo(NavigationEventArgs e)
    {
        try
        {
            var cameras = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);

            if (cameras.Count < 1)
            {
                Error.Text = "No camera found, decoding static image";
                await DecodeStaticResource();

                return;
            }
            MediaCaptureInitializationSettings settings;
            if (cameras.Count == 1)
            {
                settings = new MediaCaptureInitializationSettings {
                    VideoDeviceId = cameras[0].Id
                };                                                                                  // 0 => front, 1 => back
            }
            else
            {
                settings = new MediaCaptureInitializationSettings {
                    VideoDeviceId = cameras[1].Id
                };                                                                                  // 0 => front, 1 => back
            }
            await _mediaCapture.InitializeAsync(settings);

            VideoCapture.Source = _mediaCapture;
            await _mediaCapture.StartPreviewAsync();

            while (_result == null)
            {
                var photoStorageFile = await KnownFolders.PicturesLibrary.CreateFileAsync("scan.jpg", CreationCollisionOption.GenerateUniqueName);

                await _mediaCapture.CapturePhotoToStorageFileAsync(ImageEncodingProperties.CreateJpeg(), photoStorageFile);

                var stream = await photoStorageFile.OpenReadAsync();

                // initialize with 1,1 to get the current size of the image
                var writeableBmp = new WriteableBitmap(1, 1);
                writeableBmp.SetSource(stream);
                // and create it again because otherwise the WB isn't fully initialized and decoding
                // results in an IndexOutOfRange
                writeableBmp = new WriteableBitmap(writeableBmp.PixelWidth, writeableBmp.PixelHeight);
                stream.Seek(0);
                writeableBmp.SetSource(stream);
                _result = ScanBitmap(writeableBmp);
                await photoStorageFile.DeleteAsync(StorageDeleteOption.PermanentDelete);
            }
            await _mediaCapture.StopPreviewAsync();

            VideoCapture.Visibility = Visibility.Collapsed;
            CaptureImage.Visibility = Visibility.Visible;
            ScanResult.Text         = _result.Text;
        }
        catch (Exception ex)
        {
            Error.Text = ex.Message;
        }
    }
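
The comment inside the loop describes a two-pass trick: a 1x1 WriteableBitmap is used only to learn the image's real dimensions, then the bitmap is recreated at full size before decoding. The same idea as a small helper (names are illustrative):

    // Sketch only: two-pass WriteableBitmap load, mirroring the work-around above.
    static WriteableBitmap LoadWriteableBitmap(IRandomAccessStream stream)
    {
        // First pass: SetSource fills in the real PixelWidth/PixelHeight.
        var probe = new WriteableBitmap(1, 1);
        probe.SetSource(stream);

        // Second pass: recreate at full size so the pixel buffer is allocated correctly,
        // otherwise decoding can throw IndexOutOfRange.
        var bitmap = new WriteableBitmap(probe.PixelWidth, probe.PixelHeight);
        stream.Seek(0);
        bitmap.SetSource(stream);
        return bitmap;
    }
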
Example No. 8
        private async void btnCapture_Tapped(object sender, TappedRoutedEventArgs e)
        {
            Debug.WriteLine(1);
            await captureManager.VideoDeviceController.FocusControl.FocusAsync();

            Debug.WriteLine(2);
            // Create the JPEG encoding format used to store the image
            ImageEncodingProperties imgFormat = ImageEncodingProperties.CreateJpeg();

            Debug.WriteLine(3);
            //rotate and save the image
            using (var imageStream = new InMemoryRandomAccessStream())
            {
                Debug.WriteLine(4);
                //generate stream from MediaCapture
                await captureManager.CapturePhotoToStreamAsync(imgFormat, imageStream);

                Debug.WriteLine(5);
                //create decoder and encoder
                BitmapDecoder dec = await BitmapDecoder.CreateAsync(imageStream);

                BitmapEncoder enc = await BitmapEncoder.CreateForTranscodingAsync(imageStream, dec);

                Debug.WriteLine(6);
                // rotate the image
                enc.BitmapTransform.Rotation = BitmapRotation.Clockwise90Degrees;

                Debug.WriteLine(7);
                //write changes to the image stream
                await enc.FlushAsync();

                Debug.WriteLine(8);
                // create storage file in local app storage
                TimeSpan span     = DateTime.Now.TimeOfDay;
                string   time     = String.Format("{0}{1}{2}", span.Hours, span.Minutes, span.Seconds);
                string   fileName = "#XLA_" + DateTime.Today.ToString("yyyyMMdd") + "_" + time + ".jpeg";
                file = await KnownFolders.CameraRoll.CreateFileAsync(fileName, CreationCollisionOption.GenerateUniqueName);

                //await captureManager.CapturePhotoToStorageFileAsync(imgFormat, file);

                // Get photo as a BitmapImage
                //bmpImage = new BitmapImage(new Uri(file.Path));


                Debug.WriteLine(9);
                using (var fileStream = await file.OpenStreamForWriteAsync())
                {
                    try
                    {
                        Debug.WriteLine(10);
                        // because of the using statement, the stream will be closed automatically after copying finishes
                        await RandomAccessStream.CopyAsync(imageStream, fileStream.AsOutputStream());

                        Debug.WriteLine(11);
                        rectCrop.Visibility = Visibility.Visible;
                        Debug.WriteLine(13);
                        await rectCrop.LoadImage(file);

                        Debug.WriteLine(12);
                    }
                    catch
                    {
                    }
                }
            }
        }
        private async Task ImageUpdate(bool isCommand)
        {
            DateTime currentTime = DateTime.UtcNow;

            // Just in case - stop the code being called while a photo is already in progress
            if (this.cameraBusy)
            {
                return;
            }
            this.cameraBusy = true;

            try
            {
                using (Windows.Storage.Streams.InMemoryRandomAccessStream captureStream = new Windows.Storage.Streams.InMemoryRandomAccessStream())
                {
                    await this.mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), captureStream);

                    await captureStream.FlushAsync();

#if DEBUG
                    IStorageFile photoFile = await KnownFolders.PicturesLibrary.CreateFileAsync(ImageFilenameLocal, CreationCollisionOption.ReplaceExisting);

                    ImageEncodingProperties imageProperties = ImageEncodingProperties.CreateJpeg();
                    await this.mediaCapture.CapturePhotoToStorageFileAsync(imageProperties, photoFile);
#endif

                    string azureFilenameLatest  = string.Format(this.azureStorageimageFilenameLatestFormat, currentTime);
                    string azureFilenameHistory = string.Format(this.azureStorageImageFilenameHistoryFormat, currentTime);

                    LoggingFields imageInformation = new LoggingFields();
                    imageInformation.AddDateTime("TakenAtUTC", currentTime);
                    imageInformation.AddBoolean("IsCommand", isCommand);
#if DEBUG
                    imageInformation.AddString("LocalFilename", photoFile.Path);
#endif
                    imageInformation.AddString("AzureFilenameLatest", azureFilenameLatest);
                    imageInformation.AddString("AzureFilenameHistory", azureFilenameHistory);
                    this.logging.LogEvent("Saving image(s) to Azure storage", imageInformation);

                    // Update the latest image in storage
                    if (!string.IsNullOrWhiteSpace(azureFilenameLatest))
                    {
                        captureStream.Seek(0);
                        Debug.WriteLine("AzureIoT Hub latest image upload start");
                        await this.azureIoTHubClient.UploadToBlobAsync(azureFilenameLatest, captureStream.AsStreamForRead());

                        Debug.WriteLine("AzureIoT Hub latest image upload done");
                    }

                    // Upload the historic image to storage
                    if (!string.IsNullOrWhiteSpace(azureFilenameHistory))
                    {
                        captureStream.Seek(0);
                        Debug.WriteLine("AzureIoT Hub historic image upload start");
                        await this.azureIoTHubClient.UploadToBlobAsync(azureFilenameHistory, captureStream.AsStreamForRead());

                        Debug.WriteLine("AzureIoT Hub historic image upload done");
                    }
                }
            }
            catch (Exception ex)
            {
                this.logging.LogMessage("Image capture or AzureIoTHub storage upload failed " + ex.Message, LoggingLevel.Error);
            }
            finally
            {
                this.cameraBusy = false;
            }
        }
        private async Task <String> CapturePhoto()
        {
            var myPictures = await Windows.Storage.StorageLibrary.GetLibraryAsync(Windows.Storage.KnownLibraryId.Pictures);

            StorageFile file = await myPictures.SaveFolder.CreateFileAsync("photo.jpg", CreationCollisionOption.GenerateUniqueName);


            using (var captureStream = new InMemoryRandomAccessStream())
            {
                await _mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), captureStream);

                using (var fileStream = await file.OpenAsync(FileAccessMode.ReadWrite))
                {
                    var decoder = await BitmapDecoder.CreateAsync(captureStream);

                    var encoder = await BitmapEncoder.CreateForTranscodingAsync(fileStream, decoder);

                    var properties = new BitmapPropertySet {
                        { "System.Photo.Orientation", new BitmapTypedValue(PhotoOrientation.Normal, PropertyType.UInt16) }
                    };
                    await encoder.BitmapProperties.SetPropertiesAsync(properties);

                    await encoder.FlushAsync();

                    // decoder = null;
                    // encoder = null;
                    //await  fileStream.FlushAsync();
                    // fileStream.Dispose();
                }
                //await  captureStream.FlushAsync();
                // captureStream.Dispose();

                // GC.Collect();
            }

            var   ttv          = rect.TransformToVisual(previewElement);
            Point screenCoords = ttv.TransformPoint(new Point(0, 0));
            // screenCoords.X -= 120;
            // screenCoords.Y -= 120;

            StorageFile croppedFile = await myPictures.SaveFolder.CreateFileAsync("photocrop.jpg", CreationCollisionOption.GenerateUniqueName);

            // var returnedcropImage = await CropBitmap.GetCroppedBitmapAsync(file, screenCoords, rect.RenderSize, 1);
            await CropBitmap.SaveCroppedBitmapAsync(file, croppedFile, screenCoords, rect.RenderSize);

            ///Image bmp = new Image();
            // bmp.Source = returnedcropImage;

            // bmp.sa


            // String ret = await Upload("http://192.168.0.102:8090/api/Switch/UploadFiles", croppedFile);
            //String ret = "";
            await  ObserveObjects(file);

            await file.DeleteAsync();

            //await  croppedFile.DeleteAsync();
            //if(ret.Contains("closed"))
            //{
            //    ret = "Full";
            //}
            //else if(ret.Contains("open"))
            //{
            //    ret = "reserve";
            //}
            //else if (ret.Contains("bird"))
            //{
            //    ret = "empty";
            //}
            return("");
        }
        /// <summary>
        /// Takes a photo to a StorageFile and adds rotation metadata to it
        /// </summary>
        /// <returns></returns>
        private async Task TakePhotoAsync()
        {
            // While taking a photo, keep the video button enabled only if the camera supports simultaneously taking pictures and recording video
            VideoButton.IsEnabled = _mediaCapture.MediaCaptureSettings.ConcurrentRecordAndPhotoSupported;

            // Make the button invisible if it's disabled, so it's obvious it cannot be interacted with
            VideoButton.Opacity = VideoButton.IsEnabled ? 1 : 0;

            var stream = new InMemoryRandomAccessStream();

            Debug.WriteLine("Taking photo...");
            Speech.Speak("Thank you, authenticating you now.");
            await _mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), stream);

            try
            {
                string datetime = DateTime.Now.ToString("yy-MM-dd") + "-" + DateTime.Now.ToString("hh-mm-ss") + ".jpg";
                var    file     = await _captureFolder.CreateFileAsync(datetime, CreationCollisionOption.GenerateUniqueName);

                Debug.WriteLine("Photo taken! Saving to " + file.Path);

                var photoOrientation = CameraRotationHelper.ConvertSimpleOrientationToPhotoOrientation(_rotationHelper.GetCameraCaptureOrientation());

                await ReencodeAndSavePhotoAsync(stream, file, photoOrientation);

                Debug.WriteLine("Photo saved!");

                IStorageFile storageFile = await StorageFile.GetFileFromPathAsync(file.Path);

                IBuffer buffer = await FileIO.ReadBufferAsync(storageFile);

                byte[] bytes    = System.Runtime.InteropServices.WindowsRuntime.WindowsRuntimeBufferExtensions.ToArray(buffer);
                Stream streamer = new MemoryStream(bytes);
                Windows.Web.Http.HttpStreamContent streamContent = new Windows.Web.Http.HttpStreamContent(streamer.AsInputStream());

                var myFilter = new Windows.Web.Http.Filters.HttpBaseProtocolFilter();
                myFilter.AllowUI = false;
                var client = new Windows.Web.Http.HttpClient(myFilter);
                Windows.Web.Http.HttpResponseMessage result = await client.PostAsync(new Uri(GlobalData.protocol + GlobalData.ip + ":" + GlobalData.port + GlobalData.endpoint), streamContent);

                string stringReadResult = await result.Content.ReadAsStringAsync();

                Debug.WriteLine(stringReadResult);

                JToken token      = JObject.Parse(stringReadResult);
                int    identified = (int)token.SelectToken("Results");
                string Response   = (string)token.SelectToken("ResponseMessage");

                if (identified != 0)
                {
                    Debug.WriteLine("Identified " + identified);
                    this.Frame.Navigate(typeof(AppHome));
                }
                else
                {
                    Speech.Speak("Sorry we cannot authorise your request");
                }
            }
            catch (Exception ex)
            {
                // File I/O errors are reported as exceptions
                Debug.WriteLine("Exception when taking a photo: " + ex.ToString());
            }

            // Done taking a photo, so re-enable the button
            VideoButton.IsEnabled = true;
            VideoButton.Opacity   = 1;
        }
Example No. 12
        private async void ProcessCurrentVideoFrame(ThreadPoolTimer timer)
        {
            if (_mediaCapture.CameraStreamState != Windows.Media.Devices.CameraStreamState.Streaming || !_frameProcessingSemaphore.Wait(0))
            {
                return;
            }

            if (!_motion)
            {
                Debug.WriteLine("No motion detected.");
                _frameProcessingSemaphore.Release();
                return;
            }

            try
            {
                var stream = new InMemoryRandomAccessStream();
                await _mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), stream);

                MemoryStream memStream = await ConvertFromInMemoryRandomAccessStream(stream);

                Face[] result = await _faceServiceClient.DetectAsync(memStream, false, false, new[] { FaceAttributeType.Emotion });

                string displayText = $"{result.Length} faces found | {DateTime.Now.ToLongTimeString()}";

                if (result.Any())
                {
                    List <EmotionResult> emotions = new List <EmotionResult>
                    {
                        new EmotionResult()
                        {
                            Name = "Anger", Score = result.First().FaceAttributes.Emotion.Anger, LedStatus = LedStatus.Red
                        },
                        new EmotionResult()
                        {
                            Name = "Happiness", Score = result.First().FaceAttributes.Emotion.Happiness, LedStatus = LedStatus.Green
                        },
                        new EmotionResult()
                        {
                            Name = "Neutral", Score = result.First().FaceAttributes.Emotion.Neutral, LedStatus = LedStatus.Blue
                        }
                    };

                    displayText += string.Join(", ", emotions.Select(a => $"{a.Name}: {(a.Score * 100.0f).ToString("#0.00")}"));

                    _rgbLed.TurnOnLed(emotions.OrderByDescending(a => a.Score).First().LedStatus);
                }

                await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => StatusText.Text = displayText);

                Debug.WriteLine(displayText);
            }
            catch (Exception ex)
            {
                Debug.WriteLine("Exception with ProcessCurrentVideoFrame: " + ex);
            }
            finally
            {
                _frameProcessingSemaphore.Release();
            }
        }
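
ConvertFromInMemoryRandomAccessStream is not included in this snippet; a plausible implementation, offered as an assumption since the original helper isn't shown, simply copies the captured JPEG into a seekable MemoryStream for the Face client:

        // Sketch only: copy the random-access stream into a MemoryStream and rewind it.
        static async Task<MemoryStream> ConvertFromInMemoryRandomAccessStream(IRandomAccessStream stream)
        {
            stream.Seek(0);
            var memoryStream = new MemoryStream();
            await stream.AsStreamForRead().CopyToAsync(memoryStream);
            memoryStream.Position = 0;
            return memoryStream;
        }
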
Example No. 13
        private async void CaptureBarcodeFromCamera(object data)
        {
            MessageDialog dialog = new MessageDialog(string.Empty);

            try
            {
                await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async() =>
                {
                    if (!isCameraFound)
                    {
                        return;
                    }

                    ImageEncodingProperties imgFormat = ImageEncodingProperties.CreateJpeg();
                    // create storage file in local app storage
                    StorageFile file = await ApplicationData.Current.LocalFolder.CreateFileAsync(
                        "temp.jpg",
                        CreationCollisionOption.GenerateUniqueName);
                    // take photo
                    await captureMgr.CapturePhotoToStorageFileAsync(imgFormat, file);
                    // Get photo as a BitmapImage
                    BitmapImage bmpImage   = new BitmapImage(new Uri(file.Path));
                    bmpImage.CreateOptions = BitmapCreateOptions.IgnoreImageCache;
                    using (IRandomAccessStream fileStream = await file.OpenAsync(FileAccessMode.Read))
                    {
                        var properties =
                            captureMgr.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.Photo) as
                            VideoEncodingProperties;
                        wrb = new WriteableBitmap((int)properties.Width, (int)properties.Height);
                        wrb.SetSource(fileStream);
                        //wrb = await Windows.UI.Xaml.Media.Imaging.BitmapFactory.New(1, 1).FromStream(fileStream);
                    }
                    br = new BarcodeReader
                    {
                        AutoRotate = true,
                        Options    =
                            new DecodingOptions
                        {
                            PossibleFormats =
                                new BarcodeFormat[]
                            { BarcodeFormat.CODE_39, BarcodeFormat.QR_CODE, BarcodeFormat.PDF_417 },
                            TryHarder   = true,
                            PureBarcode = false
                        }
                    };
                    res = br.Decode(wrb);
                    if (res != null)
                    {
                        BarcodeContent = res.Text;
                        CameraClickedEventArgs cameraArgs = new CameraClickedEventArgs {
                            EncodedData = this.BarcodeContent
                        };
                        if (this.EmailDecoded != null)
                        {
                            EmailDecoded(this, cameraArgs);
                        }
                    }

                    timer.Change(4000, Timeout.Infinite);
                });
            }

            catch (Exception ex)
            {
                dialog = new MessageDialog("Error: " + ex.Message);
                await dialog.ShowAsync();
            }
        }
Example No. 14
        async void GetEmotions(object sender, object e)
        {
            var ms = new MemoryStream();

            // Uri uri = new Uri("ms-appx:///Assets/WIN_20160205_23_45_55_Pro.jpg");
            StorageFile file = await ApplicationData.Current.LocalFolder.CreateFileAsync(
                "TestPhoto.jpg",
                CreationCollisionOption.GenerateUniqueName);

            await MC.CapturePhotoToStorageFileAsync(ImageEncodingProperties.CreateJpeg(), file);

            //.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), ms.AsRandomAccessStream());

            ms.Position = 0L;
            var ms1 = new MemoryStream();
            await ms.CopyToAsync(ms1);

            ms.Position = 0L;
            var ms2 = new MemoryStream();

            var randomAccessStream = await file.OpenReadAsync();

            Stream stream = randomAccessStream.AsStreamForRead();

            Microsoft.ProjectOxford.Face.Contract.Face[] faces = await faceServiceClient.DetectAsync(stream, false, true, new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.FacialHair, FaceAttributeType.Smile, FaceAttributeType.Glasses });

            var randomAccessStream2 = await file.OpenReadAsync();

            Stream stream2 = randomAccessStream2.AsStreamForRead();
            var    Emo     = await Oxford.RecognizeAsync(stream2);

            if (Emo != null && Emo.Length > 0)
            {
                var Face = Emo[0];
                var s    = Face.Scores;
                if (faces[0].FaceAttributes.Gender.Equals("male"))
                {
                    faces[0].FaceAttributes.Gender = "мужчина";
                }
                else
                {
                    faces[0].FaceAttributes.Gender = "женщина";
                }

                Speak(faces);
                //Wait();
                //if (s.Surprise > 0.8)
                //{
                //    if (!SentSurprize)
                //    {
                //        ms1.Position = 0L;
                //        var u = await SendPicture(ms1);
                //        await RoverServices.InsertSF(u, s.Surprise);
                //        SentSurprize = true;
                //    }
                //}


                var T = new Thickness();
                T.Left = Face.FaceRectangle.Left;
                T.Top  = Face.FaceRectangle.Top;
                MyEmo.Update(Face.Scores);

                //await RoverServices.Insert(Face.Scores);
            }
        }