public static async Task<ImgurEntity> UploadImgur(IRandomAccessStream fileStream)
{
    try
    {
        // Read the whole stream into a buffer in one call instead of byte-by-byte.
        var imageData = new byte[fileStream.Size];
        await fileStream.AsStreamForRead().ReadAsync(imageData, 0, imageData.Length);

        var theAuthClient = new HttpClient();
        var request = new HttpRequestMessage(HttpMethod.Post, "https://api.imgur.com/3/image");
        request.Headers.Authorization = new AuthenticationHeaderValue("Client-ID", "e5c018ac1f4c157");

        // Imgur's v3 API accepts binary, base64, or URL payloads; this sample sends base64,
        // so the "type" field is declared accordingly.
        string base64Img = Convert.ToBase64String(imageData);

        var form = new MultipartFormDataContent
        {
            { new StringContent(base64Img), "image" },
            { new StringContent("base64"), "type" }
        };
        request.Content = form;

        HttpResponseMessage response = await theAuthClient.SendAsync(request);
        string responseString = await response.Content.ReadAsStringAsync();
        if (responseString == null) return null;

        return JsonConvert.DeserializeObject<ImgurEntity>(responseString);
    }
    catch (WebException)
    {
        return null;
    }
    catch (IOException)
    {
        return null;
    }
}
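A hypothetical calling sketch for the helper above (the FileOpenPicker flow and the shape of ImgurEntity are assumptions, not part of the original sample):

    var picker = new FileOpenPicker { SuggestedStartLocation = PickerLocationId.PicturesLibrary };
    picker.FileTypeFilter.Add(".jpg");

    StorageFile file = await picker.PickSingleFileAsync();
    if (file != null)
    {
        using (IRandomAccessStream stream = await file.OpenAsync(FileAccessMode.Read))
        {
            // UploadImgur reads the stream, posts it to Imgur, and deserializes the JSON response.
            ImgurEntity entity = await UploadImgur(stream);
            // Inspect the deserialized response here (the exact ImgurEntity members depend on how it is defined).
        }
    }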
        public static String ExportToFile(IRandomAccessStream exportStream)
        {
            StringBuilder exportResults = new StringBuilder();

            using (AppStreamWriter sw = new AppStreamWriter(exportStream.AsStream()))
            {
                LoadLessonsUnsorted();

                //write Number of Lessons
                sw.WriteLine(AppData.Lessons.Length.ToString());

                int exportedLessons = 0;
                int exportedWords = 0;
                int exportedSentences = 0;
                int exportedKanjis = 0;

                foreach (Lesson lesson in AppData.Lessons)
                {
                    sw.WriteLine(lesson.ToExportString());

                    switch (lesson.type)
                    {
                        case 0: exportedWords += WriteVocabLesson(sw, lesson); break;
                        case 1: exportedSentences += WriteInsertLesson(sw, lesson); break;
                        case 2: break;
                        case 3: exportedKanjis += WriteKanjiLesson(sw, lesson); break;
                        case 4: break;
                    }

                    ++exportedLessons;
                }

                exportResults.AppendLine("Datenbank erfolgreich exportiert!");
                exportResults.AppendLine("Exportierte Lektionen\t: " + exportedLessons);
                exportResults.AppendLine("Exportierte Wörter\t: " + exportedWords);
                exportResults.AppendLine("Exportierte Lückentexte\t: " + exportedSentences);
                exportResults.AppendLine("Exportierte Kanjis\t\t: " + exportedKanjis);
            }

            AppData.Lessons = null;
            AppData.Words = null;
            AppData.Sentences = null;
            AppData.Kanjis = null;

            return exportResults.ToString();
        }
Example #3
        /// <summary>
        /// Copies an image from the internet (http protocol) locally to the AppData LocalFolder.  This is used by some methods
        /// (like the SecondaryTile constructor) that do not support referencing images over http but can reference them using
        /// the ms-appdata protocol.
        /// </summary>
        /// <param name="internetUri">The path (URI) to the image on the internet</param>
        /// <param name="uniqueName">A unique name for the local file</param>
        /// <returns>Path to the image that has been copied locally</returns>
        public static async Task <Uri> GetLocalImageAsync(string internetUri, string uniqueName)
        {
            if (string.IsNullOrEmpty(internetUri))
            {
                return(new Uri("ms-appx:///Assets/Square150x150Logo.scale-200.png", UriKind.Absolute));
            }

            using (var response = await HttpWebRequest.CreateHttp(internetUri).GetResponseAsync())
            {
                using (var stream = response.GetResponseStream())
                {
                    var         desiredName = string.Format("{0}.jpg", uniqueName);
                    StorageFile file        = await ApplicationData.Current.LocalFolder.CreateFileAsync(desiredName, CreationCollisionOption.ReplaceExisting);

                    using (IRandomAccessStream filestream = await file.OpenAsync(FileAccessMode.ReadWrite))
                    {
                        await stream.CopyToAsync(filestream.AsStream());

                        return(new Uri(string.Format("ms-appdata:///local/{0}.jpg", uniqueName), UriKind.Absolute));
                    }
                }
            }
        }
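Because SecondaryTile cannot load its logo over http, the ms-appdata Uri returned by GetLocalImageAsync can be passed straight to the tile constructor. A minimal sketch (tile id, display name, and arguments below are placeholders, not from the original sample):

        // Copy the remote image locally, then pin a secondary tile that uses it as its logo.
        Uri logoUri = await GetLocalImageAsync("http://example.com/artwork.jpg", "artwork42");

        var tile = new SecondaryTile(
            "tile-artwork42",        // tileId
            "Artwork",               // displayName
            "args=artwork42",        // activation arguments
            logoUri,                 // ms-appdata:///local/... image copied above
            TileSize.Square150x150);

        bool pinned = await tile.RequestCreateAsync();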
        public async void ContinueFileSavePicker(FileSavePickerContinuationEventArgs args)
        {
            if ((args.ContinuationData["Operation"] as string) == "ExportDatabase" && args.File != null)
            {
                StorageFile file = args.File;

                IRandomAccessStream fileStream = await file.OpenAsync(FileAccessMode.ReadWrite);

                DataManager.ConnectToDatabase();
                String exportStatus = DataManager.ExportToFile(fileStream.AsStream());
                DataManager.CloseConnection();

                fileStream.Dispose();

                MessageBox.Show(exportStatus);
            }
            else
            {
                MessageBox.Show("Export fehlgeschlagen!");
            }

            exportDatabase = false;
        }
Example #5
    public static async void Send(IRandomAccessStream stream)
    {
        if (stream == null)
        {
            Show("NoData");
            return;
        }
        Show("Sending");
        using (StreamSocket socket = new StreamSocket())
        {
            try
            {
                // Initiate the connection
                await socket.ConnectAsync(new HostName(IP_CHE), LISTEN_PORT);

                using (DataWriter writer = new DataWriter(socket.OutputStream))
                {
                    // Write the payload length first
                    var length = (uint)stream.AsStream().Length;
                    writer.WriteUInt32(length);
                    // Then transfer the buffer itself
                    IBuffer buffer = await StreamToBuffer(stream);

                    writer.WriteBuffer(buffer);
                    await writer.StoreAsync();

                    Show("Sended");
                    // OnTakeAnother();
                }
            }
            catch (Exception e)
            {
                Show(e.Message);
            }
        }
    }
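The sender above writes a 4-byte length prefix followed by the image buffer. A matching receiver sketch (the listener wiring is an assumption; only LISTEN_PORT comes from the original code):

    // Hypothetical receiving side: accept a connection, read the length prefix, then the payload.
    var listener = new StreamSocketListener();
    listener.ConnectionReceived += async (s, args) =>
    {
        using (var reader = new DataReader(args.Socket.InputStream))
        {
            await reader.LoadAsync(sizeof(uint));      // 4-byte length written by DataWriter.WriteUInt32
            uint length = reader.ReadUInt32();

            await reader.LoadAsync(length);            // load the image bytes
            IBuffer payload = reader.ReadBuffer(length);
            // ... decode payload, e.g. copy it into an InMemoryRandomAccessStream for BitmapDecoder
        }
    };
    await listener.BindServiceNameAsync(LISTEN_PORT);  // same port constant the sender connects to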
Example #6
        private async void getEmotion_Click(object sender, RoutedEventArgs e)
        {
            try
            {
                emotionResult = await emotionServiceClient.RecognizeAsync(imageStream.AsStream());

                if (emotionResult != null)
                {
                    output.Text = "your emotions are: \n" +
                                  "Happiness: " + emotionResult[0].Scores.Happiness + "\n" +
                                  "Sadness: " + emotionResult[0].Scores.Sadness + "\n" +
                                  "Surprise: " + emotionResult[0].Scores.Surprise + "\n" +
                                  "Fear: " + emotionResult[0].Scores.Fear + "\n" +
                                  "Anger: " + emotionResult[0].Scores.Anger + "\n" +
                                  "Contempt: " + emotionResult[0].Scores.Contempt + "\n" +
                                  "Disgust: " + emotionResult[0].Scores.Disgust + "\n" +
                                  "Neutral: " + emotionResult[0].Scores.Neutral + "\n";
                }
            }
            catch
            {
                output.Text = "Error returning the emotion";
            }
        }
Example #7
        private async void getemotion_Click(object sender, RoutedEventArgs e)
        {
            try
            {
                emotionResult = await emotionServiceClient.RecognizeAsync(imageStream.AsStream());

                if (emotionResult != null)
                {
                    Microsoft.ProjectOxford.Common.Contract.EmotionScores score = emotionResult[0].Scores;
                    output.Text = "Your emotions are: \n" +
                                  "Happiness: " + score.Happiness.ToString("0.00000") + "\n" +
                                  "Sadness: " + score.Sadness.ToString("0.00000") + "\n" +
                                  "Surprise: " + score.Surprise.ToString("0.00000") + "\n" +
                                  "Fear: " + score.Fear.ToString("0.00000") + "\n" +
                                  "Anger: " + score.Anger.ToString("0.00000") + "\n" +
                                  "Disgust: " + score.Disgust.ToString("0.00000") + "\n" +
                                  "Neutral: " + score.Neutral.ToString("0.00000") + "\n";
                }
            }
            catch
            {
                output.Text = "Error calling emotion";
            }
        }
        public static String ImportFromFile(IRandomAccessStream importStream)
        {
            StringBuilder importResults = new StringBuilder();

            using (AppStreamReader sr = new AppStreamReader(importStream.AsStream()))
            {
                try
                {
                    String line = sr.ReadLine();

                    // Guard against an empty file: only process while a line was actually read.
                    while (line != null)
                    {
                        String[] parts = line.Split('|');
                        int operation = Convert.ToInt32(parts[0]);
                        int itemCount = Convert.ToInt32(parts[1]);

                        String results = "";

                        switch (operation)
                        {
                            case 0: results = AddLessons(sr, itemCount); break;
                            case 1: results = UpdateLessons(sr, itemCount); break;
                            case 2: results = UpdateWords(sr, itemCount); break;
                            case 3: results = UpdateKanjis(sr, itemCount); break;
                            case 4: results = AddWords(sr, itemCount); break;
                            case 5: results = AddKanjis(sr, itemCount); break;
                        }

                        importResults.Append(results);

                        line = sr.ReadLine();
                    }

                    context.SubmitChanges();
                }
                catch (Exception e)
                {
                    importResults.Clear();
                    importResults.AppendLine("Import Fehlgeschlagen in Zeile: " + sr.CurrentLine);
                    importResults.AppendLine("System: " + e.Message);
                }
            }

            return importResults.ToString();
        }
        private async void OnSelectImageButtonClicked(object sender, RoutedEventArgs e)
        {
            var picker = new FileOpenPicker();

            picker.ViewMode = PickerViewMode.Thumbnail;
            picker.SuggestedStartLocation = PickerLocationId.PicturesLibrary;
            picker.FileTypeFilter.Add(".jpg");
            picker.FileTypeFilter.Add(".jpeg");
            picker.FileTypeFilter.Add(".png");

            var file = await picker.PickSingleFileAsync();

            if (file != null)
            {
                using (IRandomAccessStream stream = await file.OpenAsync(FileAccessMode.Read))
                {
                    // Show the image
                    var image = new BitmapImage();
                    await image.SetSourceAsync(stream);

                    LoadedImage.Source = image;
                    stream.Seek(0L);

                    try
                    {
                        Progress.IsActive  = true;
                        Overlay.Visibility = Visibility.Visible;

                        // Submit the image to the Custom Vision Service
                        CustomVisionPredictionClient client = new CustomVisionPredictionClient(
                            new ApiKeyServiceClientCredentials(_key),
                            new System.Net.Http.DelegatingHandler[] { }
                            );

                        client.Endpoint = _uri;

                        var result = await client.ClassifyImageAsync(_id, _name, stream.AsStream());

                        var prediction = result.Predictions.FirstOrDefault(x => x.TagName.ToLowerInvariant() == "hotdog");

                        Progress.IsActive  = false;
                        Overlay.Visibility = Visibility.Collapsed;

                        // Show the results
                        if (prediction != null)
                        {
                            // If the results include a "hotdog" label, show the probability that it's a hot dog
                            await new MessageDialog($"Probability that it's a hot dog: {prediction.Probability:P1}").ShowAsync();
                        }
                        else
                        {
                            // If the results don't include a "hotdog" label, show all tags and probabilities
                            var builder = new StringBuilder();
                            foreach (var pred in result.Predictions)
                            {
                                builder.Append($"{pred.TagName}: {pred.Probability:P1}\n");
                            }

                            await new MessageDialog(builder.ToString()).ShowAsync();
                        }
                    }
                    catch (Exception ex)
                    {
                        Progress.IsActive  = false;
                        Overlay.Visibility = Visibility.Collapsed;

                        await new MessageDialog(ex.Message).ShowAsync();
                    }
                    finally
                    {
                        Progress.IsActive  = false;
                        Overlay.Visibility = Visibility.Collapsed;
                    }
                }
            }
        }
Example #10
        private async void button_LogIn_Click(object sender, RoutedEventArgs e)
        {
            tb_LogInResult.Text = "";

            try
            {
                var memstream = new InMemoryRandomAccessStream();

                //Define image format (JPEG)
                ImageEncodingProperties imgFormat = ImageEncodingProperties.CreateJpeg();

                //Capture Photo to random access memory stream
                await mediaCapture.CapturePhotoToStreamAsync(imgFormat, memstream);

                IRandomAccessStream stream = memstream.CloneStream();
                if (stream != null)
                {
                    if (!progressRing.IsActive)
                    {
                        progressRing.IsActive = true;

                        //Decode to bitmap format
                        BitmapDecoder decoder = await BitmapDecoder.CreateAsync(stream);

                        SoftwareBitmap bitmap = await decoder.GetSoftwareBitmapAsync();

                        SoftwareBitmap       bitmap2 = SoftwareBitmap.Convert(bitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                        SoftwareBitmapSource bmpSrc  = new SoftwareBitmapSource();
                        await bmpSrc.SetBitmapAsync(bitmap2);

                        //Set source for the image control to display the captured image
                        PhotoControl.Source = bmpSrc;

                        //Upload image to Azure blob and retrieve the image URL
                        string fileName = "temp" + DateTime.Now.ToString("ddMMyyyyhhmmtt") + ".png";
                        var    imageUrl = await storageClient.uploadPhotoAsync("temp", fileName, stream.AsStream());

                        if (!(imageUrl is Exception))
                        {
                            //Detect face (if any) in the image
                            var FaceIDResponse = await faceClient.DetectFaceAsync(imageUrl.ToString());

                            if (FaceIDResponse.IsSuccessStatusCode)
                            {
                                //Detect face responds with face attributes if any face has been identified
                                var results = JsonConvert.DeserializeObject <DetectFace.FaceClass[]>(await FaceIDResponse.Content.ReadAsStringAsync());

                                //No face found in the image
                                if (results.Length == 0)
                                {
                                    msg.Title   = "Unable to register face!";
                                    msg.Content = "No face found in the image, please try again!";
                                    await msg.ShowAsync();

                                    return;
                                }

                                //Add all Face IDs to a list
                                List <string> FaceIDList = new List <string>();
                                foreach (var item in results)
                                {
                                    FaceIDList.Add(item.faceId);
                                }

                                //Check if the face matches any person in the person group with a confidence of 0.5F
                                var candidateResponse = await faceClient.IdentifyFaceAsync(0.5F, FaceIDList.ToArray(), 2, AppSettings.defaultPersonGroupID);

                                var responseContent = candidateResponse.Content.ReadAsStringAsync().Result;
                                if (candidateResponse.IsSuccessStatusCode)
                                {
                                    //Identify function responds with a list of candidates to which the face matches and the confidence
                                    var result = JsonConvert.DeserializeObject <IdentifyFaceResponseModel[]>(responseContent);

                                    //Getting the candidate with highest confidence level
                                    var    candidate = result[0].candidates[0];
                                    string personID  = localSettings.Values["PersonId"].ToString();

                                    //Check if the confidence level is greater than 0.5 and if the Person ID of
                                    //the candidate matches the person ID of the logged in user
                                    if ((candidate.confidence > 0.5) && (candidate.personId == personID))
                                    {
                                        //Face matches - proceed to transaction
                                        Frame.Navigate(typeof(TransactionPage));
                                    }
                                    else
                                    {
                                        //Face does not match
                                        msg.Title   = "Unable to log in!";
                                        msg.Content = "Face does not match records or do not meet the required threshold!";
                                        await msg.ShowAsync();

                                        Frame.GoBack();
                                    }
                                }
                                else
                                {
                                    msg.Title   = "Unable to log in!";
                                    msg.Content = responseContent;
                                    await msg.ShowAsync();

                                    Frame.GoBack();
                                }
                            }
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                msg.Title   = "Unable to log in!";
                msg.Content = ex.Message;
                await msg.ShowAsync();
            }
            progressRing.IsActive = false;
            PhotoControl.Source   = null;
        }
        public FileSummary(BitmapImage image, StorageFile file, IDictionary<string, object> properties, IRandomAccessStream stream)
        {
            JpegInfo exifInfo = ExifReader.ReadJpeg(stream.AsStream());

            this.BasicData = new List<ExifDatum>
            {
                new ExifDatum("Name", file.Name),
                new ExifDatum("Path", file.Path),
                new ExifDatum("Created", file.DateCreated.ToString()),
                new ExifDatum("Dimensions", image.PixelWidth.ToString() + "px by " + image.PixelHeight.ToString() + "px")
            };

            if (properties.ContainsKey("size"))
            {
                double num = double.Parse(properties["size"].ToString(), System.Globalization.CultureInfo.InvariantCulture);

                this.BasicData.Add(new ExifDatum("Size", ConvertBitSize(num)));
            }

            this.ExifData = exifInfo
                .GetType()
                .GetRuntimeFields()
                .Select(field => new ExifDatum(field, field.GetValue(exifInfo)))
                .Where(datum => datum.DisplayValue != null)
                .OrderBy(datum => datum.Name)
                .ToList();

            this.FullData = properties
                .Where(field => field.Value != null && field.Key[0] != '{')
                .Select(field => ConvertPairToDatum(field))
                .OrderBy(datum => datum.Name)
                .ToList();
        }
        private async void BTNpost_Click(object sender, RoutedEventArgs e)
        {
            PRGRSpost.ProgressStart();
            String content = TXTBLKpost.Text;

            if (content.Length > 1000)
            {
                Constants.BoxPage.ShowMessage("树洞内容不要太长");
                return;
            }
            if ("".Equals(content) && HoleType == HoleInfo.TYPE_TEXT)
            {
                Constants.BoxPage.ShowMessage("不能没有树洞内容");
                return;
            }
            List <Parameters> param = new List <Parameters>();
            String            type  = "text";

            param.Add(new Parameters("action", "dopost"));

            switch (HoleType)
            {
            case 2:
            {
                type = "image";
                try
                {
                    var a = await file.OpenAsync(FileAccessMode.ReadWrite);

                    Stream stream  = a.AsStream();
                    byte[] bts     = Util.StreamToBytes(stream);
                    String bmp_str = Convert.ToBase64String(bts);
                    param.Add(new Parameters("data", bmp_str));
                }
                catch
                {
                    Constants.BoxPage.ShowMessage("添加图像内容失败");
                    PRGRSpost.ProgressEnd();
                    return;
                }
            } break;

            case 3:
            {
                type = "audio";
                try
                {
                    Stream stream  = audioStream.AsStream();
                    byte[] bts     = Util.StreamToBytes(stream);
                    String bmp_str = Convert.ToBase64String(bts);
                    param.Add(new Parameters("data", bmp_str));
                    param.Add(new Parameters("length", Length + ""));
                }
                catch
                {
                    Constants.BoxPage.ShowMessage("添加音频内容失败");
                    PRGRSpost.ProgressEnd();
                    return;
                }
            } break;
            }
            param.Add(new Parameters("text", content));
            param.Add(new Parameters("token", Constants.token));
            param.Add(new Parameters("type", type));

            Parameters result = await WebConnection.Connect(Constants.domain + "/pkuhelper/../services/pkuhole/api.php", param);

            try
            {
                if (result.name != "200")
                {
                    Constants.BoxPage.ShowMessage("发布失败");
                    PRGRSpost.ProgressEnd();
                    return;
                }
                JsonObject jsonObject = JsonObject.Parse(result.value);
                int        code;
                try
                {
                    code = (int)jsonObject.GetNamedNumber("code");
                }
                catch
                {
                    code = int.Parse(jsonObject.GetNamedString("code"));
                }
                if (code != 0)
                {
                    Constants.BoxPage.ShowMessage("发布失败");
                    PRGRSpost.ProgressEnd();
                    return;
                }
                else
                {
                    Constants.BoxPage.ShowMessage("发布成功!");
                    PRGRSpost.ProgressEnd();
                    BTNback_Click(null, null);

                    // TODO: navigate to the newly created post's page here later
                    return;
                }
            }
            catch (Exception)
            {
                Constants.BoxPage.ShowMessage("发布失败");
                PRGRSpost.ProgressEnd();
                return;
            }
        }
        public async Task <MapFile> GetFile()
        {
            //https://github.com/Microsoft/Windows-universal-samples/blob/e13cf5dca497ad661706d150a154830666913be4/Samples/PdfDocument/cs/Scenario1_Render.xaml.cs
            var picker = new Windows.Storage.Pickers.FileOpenPicker();

            picker.ViewMode = Windows.Storage.Pickers.PickerViewMode.Thumbnail;
            picker.SuggestedStartLocation = Windows.Storage.Pickers.PickerLocationId.PicturesLibrary;
            picker.FileTypeFilter.Add(".jpg");
            picker.FileTypeFilter.Add(".jpeg");
            picker.FileTypeFilter.Add(".png");
            picker.FileTypeFilter.Add(".pdf");
            picker.FileTypeFilter.Add(".svg");
            StorageFile file = await picker.PickSingleFileAsync();

            // The picker returns null when the user cancels.
            if (file == null)
            {
                return null;
            }

            MapFile mapFile = new MapFile();

            if (file.FileType == ".pdf")
            {
                PdfDocument pdfDocument = await PdfDocument.LoadFromFileAsync(file);


                using (PdfPage page = pdfDocument.GetPage(0))
                {
                    using (var stream = new InMemoryRandomAccessStream())
                    {
                        await page.RenderToStreamAsync(stream);

                        BitmapImage bitmap = new BitmapImage();
                        await bitmap.SetSourceAsync(stream);



                        using (var stream2 = new InMemoryRandomAccessStream())
                        {
                            await page.RenderToStreamAsync(stream2);

                            // Rewind before reading the rendered bytes back out of the stream.
                            stream2.Seek(0);
                            mapFile.MapBytes = new byte[(uint)stream2.Size];
                            await stream2.AsStreamForRead().ReadAsync(mapFile.MapBytes, 0, mapFile.MapBytes.Length);
                        }


                        //
                        //mapFile.MapSoftwareImageSource.SetSource(stream2);
                        //BitmapDecoder decoder = await BitmapDecoder.CreateAsync(stream);
                        //mapFile.MapSoftwareImageSource = await decoder.GetSoftwareBitmapAsync();



                        mapFile.MapImageSource = bitmap;
                    }
                }
            }
            //http://igrali.com/2015/12/24/how-to-render-svg-in-xaml-windows-10-uwp/
            if (file.FileType == ".svg")
            {
                using (IRandomAccessStream stream = await file.OpenReadAsync())
                {
                    //SvgImageSource svgImage = new SvgImageSource();
                    //await svgImage.SetSourceAsync(stream);
                    //mapFile.svgImageSource = svgImage;

                    string svgString = await Windows.Storage.FileIO.ReadTextAsync(file);


                    //mapFile.MapImageSource = svgImage;
                    mapFile.MapSvgSource = svgString;
                    mapFile.IsSvg        = true;
                }
            }

            if (file.FileType == ".jpg" || file.FileType == ".jpeg" || file.FileType == ".png")
            {
                using (IRandomAccessStream stream = await file.OpenReadAsync())
                {
                    BitmapImage bitmap = new BitmapImage();

                    await bitmap.SetSourceAsync(stream);

                    mapFile.MapImageSource = bitmap;
                }
                using (IRandomAccessStream stream = await file.OpenReadAsync())
                {
                    mapFile.MapBytes = new byte[(uint)stream.AsStream().Length];
                    await stream.AsStreamForRead().ReadAsync(mapFile.MapBytes, 0, mapFile.MapBytes.Length);
                }
            }

            return(mapFile);
        }
Example #14
        private async void OnSelectImageButtonClicked(object sender, RoutedEventArgs e)
        {
            var picker = new FileOpenPicker();

            picker.ViewMode = PickerViewMode.Thumbnail;
            picker.SuggestedStartLocation = PickerLocationId.PicturesLibrary;
            picker.FileTypeFilter.Add(".jpg");
            picker.FileTypeFilter.Add(".jpeg");
            picker.FileTypeFilter.Add(".png");

            var file = await picker.PickSingleFileAsync();

            if (file != null)
            {
                using (IRandomAccessStream stream = await file.OpenAsync(FileAccessMode.Read))
                {
                    // Show the image
                    var image = new BitmapImage();
                    await image.SetSourceAsync(stream);

                    LoadedImage.Source = image;
                    stream.Seek(0L);

                    try
                    {
                        Progress.IsActive  = true;
                        Overlay.Visibility = Visibility.Visible;

                        // Submit the image to the Custom Vision Service
                        CustomVisionPredictionClient client = new CustomVisionPredictionClient()
                        {
                            ApiKey   = _key,
                            Endpoint = _uri
                        };

                        var result = await client.ClassifyImageAsync(_id, _name, stream.AsStream());

                        Progress.IsActive  = false;
                        Overlay.Visibility = Visibility.Collapsed;

                        // Show the result (guard against the "hotdog" tag being absent from the predictions)
                        var prediction = result.Predictions.FirstOrDefault(x => x.TagName.ToLowerInvariant() == "hotdog");

                        if (prediction != null && prediction.Probability > 0.90)
                        {
                            await new MessageDialog("It's a hot dog!").ShowAsync();
                        }
                        else
                        {
                            await new MessageDialog("Not a hot dog").ShowAsync();
                        }
                    }
                    catch (Exception ex)
                    {
                        Progress.IsActive  = false;
                        Overlay.Visibility = Visibility.Collapsed;

                        await new MessageDialog(ex.Message).ShowAsync();
                    }
                    finally
                    {
                        Progress.IsActive  = false;
                        Overlay.Visibility = Visibility.Collapsed;
                    }
                }
            }
        }
        public async void makeRecommendation(IRandomAccessStream ras)
        {
            var att = new FaceAttributeType[] { FaceAttributeType.Age, FaceAttributeType.Gender, FaceAttributeType.Smile, FaceAttributeType.FacialHair, FaceAttributeType.Emotion, FaceAttributeType.Glasses };

            using (Stream s = ras.AsStream()) //File.OpenRead(imageFilePath))
            {
                var a = await FaceServiceClient.DetectAsync(s, returnFaceAttributes : att);


                for (int i = 0; i < a.Length; i++)
                {
                    Face face     = a[i];
                    var  features = face.FaceAttributes;
                    age = features.Age;
                    // Parenthesised so the gender check applies to all three facial-hair scores, not just sideburns.
                    if ((features.FacialHair.Moustache >= 0.4 || features.FacialHair.Beard >= 0.4 || features.FacialHair.Sideburns >= 0.4) && features.Gender == "male")
                    {
                        ResultTitle.Text    = "Little Book of Beards";
                        BookCover.Source    = new BitmapImage(new Uri("http://i357.photobucket.com/albums/oo17/nikisiasoco/bookOfBeards_zpsl7aqlwwa.jpg?t=1498900330"));
                        SummaryContent.Text = "A book for men, the manliest of men. Our bearded guardians, our stubbled knights.";
                    }
                    else if (age < 15)
                    {
                        ResultTitle.Text    = "Twilight";
                        BookCover.Source    = new BitmapImage(new Uri("http://i357.photobucket.com/albums/oo17/nikisiasoco/Twilightbook_zpszj9qzcry.jpg"));
                        SummaryContent.Text = "If you like apples, theres an apple on the cover. I guess.";
                    }
                    else if (age > 45)
                    {
                        ResultTitle.Text    = "The Prince";
                        BookCover.Source    = new BitmapImage(new Uri("http://i357.photobucket.com/albums/oo17/nikisiasoco/thePrince_zpsvxuexh1h.jpg"));
                        SummaryContent.Text = "Tyranny 101.";
                    }
                    else if (age < 50 && age > 16)
                    {
                        if (features.Emotion.Happiness > 0.5)
                        {
                            ResultTitle.Text    = "You're Never Weird on the Internet";
                            BookCover.Source    = new BitmapImage(new Uri("http://i357.photobucket.com/albums/oo17/nikisiasoco/funny_zpsehbxbjtg.jpg?t=1498958983"));
                            SummaryContent.Text = "This book is read by alot of happy adults.";
                        }
                        else
                        {
                            ResultTitle.Text    = "Kafka on the Shore";
                            BookCover.Source    = new BitmapImage(new Uri("http://i357.photobucket.com/albums/oo17/nikisiasoco/kafkaOntheShore_zpsl85mx39u.jpg"));
                            SummaryContent.Text = "You look like you have good taste.";
                        }

                        /*
                         * System.Diagnostics.Debug.WriteLine(features.FacialHair.Moustache);
                         * System.Diagnostics.Debug.WriteLine(features.FacialHair.Sideburns);
                         * System.Diagnostics.Debug.WriteLine(features.FacialHair.Beard);
                         * System.Diagnostics.Debug.WriteLine(age+"Age inside algorithm. If this works, my calculations are right");
                         */
                    }

                    /*else
                     * {
                     *  System.Diagnostics.Debug.WriteLine("It's still broken, you made a mistake.");
                     * }*/
                }
            }
        }
Example #16
        public static async Task <Photo> ResizeJpeg(IRandomAccessStream chosenPhoto, uint size, string originalFileName, string localFileName, double? quality = null)
        {
            Photo photo = null;
            var   orientedPixelHeight = 0u;
            var   orientedPixelWidth  = 0u;

            //using (var sourceStream = chosenPhoto)
            {
                var decoder = await BitmapDecoder.CreateAsync(chosenPhoto);

                if (decoder.DecoderInformation != null)
                {
                    var maxDimension = Math.Max(decoder.PixelWidth, decoder.PixelHeight);
                    var scale        = (double)size / maxDimension;
                    if (scale < 1.0)
                    {
                        var orientedScaledHeight = (uint)(decoder.OrientedPixelHeight * scale);
                        var orientedScaledWidth  = (uint)(decoder.OrientedPixelWidth * scale);
                        var scaledHeight         = (uint)(decoder.PixelHeight * scale);
                        var scaledWidth          = (uint)(decoder.PixelWidth * scale);

                        var transform = new BitmapTransform {
                            ScaledHeight = scaledHeight, ScaledWidth = scaledWidth, InterpolationMode = BitmapInterpolationMode.Fant
                        };
                        var pixelData = await decoder.GetPixelDataAsync(
                            decoder.BitmapPixelFormat,
                            decoder.BitmapAlphaMode,
                            transform,
                            ExifOrientationMode.RespectExifOrientation,
                            ColorManagementMode.DoNotColorManage);

                        using (var destinationStream = new InMemoryRandomAccessStream())
                        {
                            var propertySet = new BitmapPropertySet();
                            if (quality.HasValue && quality > 0.0 && quality <= 1.0)
                            {
                                var qualityValue = new BitmapTypedValue((float)quality.Value, Windows.Foundation.PropertyType.Single); // ImageQuality expects a Single
                                propertySet.Add("ImageQuality", qualityValue);
                            }
                            var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, destinationStream, propertySet);

                            encoder.SetPixelData(decoder.BitmapPixelFormat, BitmapAlphaMode.Premultiplied, orientedScaledWidth, orientedScaledHeight, decoder.DpiX, decoder.DpiY, pixelData.DetachPixelData());
                            await encoder.FlushAsync();

                            var reader = new DataReader(destinationStream.GetInputStreamAt(0));
                            var bytes  = new byte[destinationStream.Size];
                            await reader.LoadAsync((uint)destinationStream.Size);

                            reader.ReadBytes(bytes);

                            photo = new Photo
                            {
                                Bytes    = bytes,
                                Width    = (int)orientedScaledWidth,
                                Height   = (int)orientedScaledHeight,
                                FileName = originalFileName
                            };

                            if (!string.IsNullOrEmpty(localFileName))
                            {
                                photo.File = await SaveToLocalFolderAsync(destinationStream.AsStream(), localFileName);
                            }
                        }
                    }

                    orientedPixelHeight = decoder.OrientedPixelHeight;
                    orientedPixelWidth  = decoder.OrientedPixelWidth;
                }
            }

            if (photo == null)
            {
                var reader = new DataReader(chosenPhoto.GetInputStreamAt(0));
                var bytes  = new byte[chosenPhoto.Size];
                await reader.LoadAsync((uint)chosenPhoto.Size);

                reader.ReadBytes(bytes);

                photo = new Photo
                {
                    Bytes    = bytes,
                    Width    = (int)orientedPixelWidth,
                    Height   = (int)orientedPixelHeight,
                    FileName = originalFileName
                };

                if (!string.IsNullOrEmpty(localFileName))
                {
                    photo.File = await SaveToLocalFolderAsync(chosenPhoto.AsStream(), localFileName);
                }
            }

            chosenPhoto.Dispose();

            return(photo);
        }
Example #17
        private async void BackButton_Click()
        {
            MediaCapture _mediaCapture = new MediaCapture();
            await _mediaCapture.InitializeAsync();

            double val_res = 0.0;
            double aro_res = 0.0;
            Dictionary <int, Tuple <double, double> > imageData =
                new Dictionary <int, Tuple <double, double> >();


            Dictionary <string, Tuple <double, double> > emoData =
                new Dictionary <string, Tuple <double, double> >();

            emoData.Add("Neutral", Tuple.Create(4.98, 3.91));
            emoData.Add("Sadness", Tuple.Create(2.33, 4.55));
            emoData.Add("Disgust", Tuple.Create(3.5, 6.32));
            emoData.Add("Anger", Tuple.Create(2.33, 8.46));
            emoData.Add("Surprise", Tuple.Create(2.87, 8.11));
            emoData.Add("Fear", Tuple.Create(2.11, 8.52));
            emoData.Add("Happiness", Tuple.Create(7.09, 8.52));
            imageData.Add(1, Tuple.Create(45.698, 56.948));
            imageData.Add(2, Tuple.Create(2.951, 80.254));
            imageData.Add(3, Tuple.Create(44.751, 46.579));
            imageData.Add(4, Tuple.Create(58.107, 41.844));
            imageData.Add(5, Tuple.Create(0.72, 92.402));
            imageData.Add(6, Tuple.Create(6.758, 78.315));
            imageData.Add(7, Tuple.Create(13.962, 72.308));
            imageData.Add(8, Tuple.Create(5.666, 81.855));
            imageData.Add(9, Tuple.Create(10.942, 68.792));
            imageData.Add(10, Tuple.Create(5.095, 60.461));
            imageData.Add(11, Tuple.Create(17.421, 79.824));
            imageData.Add(12, Tuple.Create(3.675, 83.936));
            imageData.Add(13, Tuple.Create(77.82, 13.285));
            imageData.Add(14, Tuple.Create(81.301, 27.56));
            imageData.Add(15, Tuple.Create(92.896, 32.971));
            imageData.Add(16, Tuple.Create(95.341, 20.78));
            imageData.Add(17, Tuple.Create(79.398, 20.647));
            imageData.Add(18, Tuple.Create(92.759, 18.212));
            imageData.Add(19, Tuple.Create(93.05, 16.782));
            imageData.Add(20, Tuple.Create(93.076, 24.758));
            imageData.Add(21, Tuple.Create(95.163, 29.304));
            imageData.Add(22, Tuple.Create(95.167, 8.46));
            imageData.Add(23, Tuple.Create(78.075, 35.561));
            imageData.Add(24, Tuple.Create(86.7, 11.621));
            imageData.Add(25, Tuple.Create(78.635, 20.068));
            imageData.Add(26, Tuple.Create(71.86, 59.58));
            imageData.Add(27, Tuple.Create(92.909, 22.847));
            imageData.Add(28, Tuple.Create(35.436, 55.342));
            imageData.Add(29, Tuple.Create(63.66, 45.324));
            imageData.Add(30, Tuple.Create(56.168, 16.541));



            for (int i = 1; i <= 3; i++)
            {
                textb.Text = "";
                pic.Source = new BitmapImage(new Uri(this.BaseUri, "/Assets/" + i + ".bmp"));

                await Task.Delay(TimeSpan.FromSeconds(2));

                ImageEncodingProperties imgFormat = ImageEncodingProperties.CreateJpeg();

                // a file to save a photo
                StorageFile file = await ApplicationData.Current.LocalFolder.CreateFileAsync("Photo.jpg", CreationCollisionOption.ReplaceExisting);

                await _mediaCapture.CapturePhotoToStorageFileAsync(imgFormat, file);

                IRandomAccessStream imageStream = await file.OpenAsync(FileAccessMode.Read);

                double resul_val = 0.0;
                double resul_aro = 0.0;



                emotionresult = await emotionserviceclient.RecognizeAsync(imageStream.AsStream());

                if (emotionresult != null)
                {
                    try
                    {
                        var    score = emotionresult[0].Scores;
                        double max   = 0.0;
                        max = Math.Max(max, score.Anger);
                        max = Math.Max(max, score.Happiness);
                        max = Math.Max(max, score.Sadness);
                        max = Math.Max(max, score.Surprise);
                        max = Math.Max(max, score.Neutral);
                        max = Math.Max(max, score.Disgust);
                        max = Math.Max(max, score.Fear);
                        double[] emo = { score.Anger, score.Disgust, score.Fear, score.Happiness, score.Neutral, score.Surprise, score.Sadness };
                        if (max == emo[0])
                        {
                            textb.Text = "Your primary emotion is: Anger";
                        }
                        else if (max == emo[1])
                        {
                            textb.Text = "Your primary emotion is: Disgust";
                        }
                        else if (max == emo[2])
                        {
                            textb.Text = "Your primary emotion is: Fear";
                        }
                        else if (max == emo[3])
                        {
                            textb.Text = "Your primary emotion is: Happiness";
                        }
                        else if (max == emo[4])
                        {
                            textb.Text = "Your primary emotion is: Neutral";
                        }
                        else if (max == emo[5])
                        {
                            textb.Text = "Your primary emotion is: Surprise";
                        }
                        else
                        {
                            textb.Text = "Your primary emotion is: Sadness";
                        }



                        resul_val = (emoData["Anger"].Item1 * score.Anger +
                                     emoData["Happiness"].Item1 * score.Happiness +
                                     emoData["Sadness"].Item1 * score.Sadness +
                                     emoData["Surprise"].Item1 * score.Surprise +
                                     emoData["Neutral"].Item1 * score.Neutral +
                                     emoData["Disgust"].Item1 * score.Disgust +
                                     emoData["Fear"].Item1 * score.Fear) * 100 / 9;


                        resul_aro = (emoData["Anger"].Item2 * score.Anger +
                                     emoData["Happiness"].Item2 * score.Happiness +
                                     emoData["Sadness"].Item2 * score.Sadness +
                                     emoData["Surprise"].Item2 * score.Surprise +
                                     emoData["Neutral"].Item2 * score.Neutral +
                                     emoData["Disgust"].Item2 * score.Disgust +
                                     emoData["Fear"].Item2 * score.Fear) * 100 / 9;
                    }
                    catch (IndexOutOfRangeException) { textb.Text = "No face detected"; }


                    double val = imageData[i].Item1;
                    double aro = imageData[i].Item2;

                    val_res += (resul_val - val) / (30);
                    aro_res += (resul_aro - aro) / (30);



                    await Task.Delay(TimeSpan.FromSeconds(2));
                }
            }
            String ans = "";

            if (val_res < 35)
            {
                ans += "The quality of your response is normal";
            }
            else if (val_res < 50)
            {
                ans += "The quality of your response shows slight deviations from ideal values";
            }
            else
            {
                ans += "The quality of your response is highly deviated from ideal values";
            }
            if (aro_res < 35)
            {
                ans += "\nThe intensity of your response is normal";
            }
            else if (aro_res < 50)
            {
                ans += "\nThe intensity of your response shows slight deviations from ideal values";
            }
            else
            {
                ans += "\nThe intensity of your response is highly deviated from ideal values";
            }

            Frame.Navigate(typeof(result), ans);
        }
        public static Journal GetJournal(IRandomAccessStream stream)
        {
            if ( stream == null )
            {
                return null;
            }
            else
            {


                var Reader=new StreamReader(stream . AsStream());

                var Doc=XDocument . Parse(Reader . ReadToEnd());

                Journal journal;
                try
                {
                    journal = (from Jou in Doc . Descendants("Journal")
                               select new Journal
                               {
                                   Name = ( string ) Jou . Attribute("Name") ,
                                   ListOfAuthor = from Aut in Jou . Descendants("Author")
                                                  select new Author
                                                  {
                                                      FirstName = ( string ) Aut . Attribute("FirstName") ,
                                                      FamilyName = ( string ) Aut . Attribute("FamilyName") ,
                                                      EmailAddress = ( string ) Aut . Attribute("EmailAddress") ,
                                                      Introduction = ( string ) Aut . Attribute("Introduction")
                                                  } ,
                                   ListOfIssue = from Iss in Jou . Descendants("Issue")
                                                 select new Issue
                                                 {
                                                     Number = ( long ) Iss . Attribute("Number") ,
                                                     PublishTime = Convert . ToDateTime(( string ) Iss . Attribute("PublishTime")) ,
                                                     Price = ( decimal ) Iss . Attribute("Price") ,
                                                     ListOfArticle = from Art in Iss . Descendants("Article")
                                                                     select new Article
                                                                     {
                                                                         Title = ( string ) Art . Attribute("Title") ,
                                                                         TextLine = from Lin in Art . Descendants("Text") . FirstOrDefault() . Descendants("Line")
                                                                                    select ( string ) Lin . Attribute("Run") ,
                                                                         ListOfAuthorName = from Aut in Art . Descendants("AuthorName")
                                                                                            select ( string ) Aut . Attribute("FirstName") + " " + ( string ) Aut . Attribute("FamilyName") ,

                                                                     }
                                                 }


                               }) . First();
                }
                catch ( System . Exception )
                {
                    return null;
                }
                foreach ( var Aut in journal . ListOfAuthor )
                {
                    foreach ( var Iss in journal . ListOfIssue )
                    {
                        foreach ( var Art in Iss . ListOfArticle )
                        {
                            foreach ( var aut in Art . ListOfAuthor )
                            {
                                if ( Aut == aut )
                                {
                                    Aut . ArticleHaveContribute . Add(Art);
                                }
                            }
                        }
                    }

                }
                return journal;
            }
        }
 public Task <string> UploadPictureAsync(IRandomAccessStream stream)
 {
     return(UploadPictureAsync(stream.AsStream()));
 }
        public async void ContinueFileOpenPicker(FileOpenPickerContinuationEventArgs args)
        {
            //Import data
            if ((args.ContinuationData["Operation"] as string) == "UpdateDatabase" &&
                args.Files != null &&
                args.Files.Count > 0)
            {
                StorageFile file = args.Files[0];

                if (file.Name.EndsWith("poyo"))
                {
                    IRandomAccessStream fileStream = await file.OpenAsync(FileAccessMode.Read);

                    DataManager.ConnectToDatabase();

                    List <Lesson> lessons;

                    int kanjiRound;
                    int vocabRound;

                    String importStatus = DataManager.ImportFromFile(fileStream.AsStream(), out lessons, out vocabRound, out kanjiRound);

                    if (MessageBox.Show(importStatus, "Import", MessageBoxButton.OKCancel) == MessageBoxResult.OK)
                    {
                        String updateStatus = DataManager.UpdateDatabase(lessons);

                        AppSettings.VocabRound = vocabRound;
                        AppSettings.KanjiRound = kanjiRound;

                        AppSettings.SaveSettings();

                        MessageBox.Show(updateStatus);
                    }
                    else
                    {
                        MessageBox.Show("Import Abgebrochen");
                    }
                    DataManager.CloseConnection();

                    fileStream.Dispose();
                }
            }
            //Add New Content
            else if ((args.ContinuationData["Operation"] as string) == "AddContent" &&
                     args.Files != null &&
                     args.Files.Count > 0)
            {
                StorageFile file = args.Files[0];

                if (file.Name.EndsWith("nya"))
                {
                    IRandomAccessStream fileStream = await file.OpenAsync(FileAccessMode.Read);

                    DataManager.ConnectToDatabase();
                    String updateStatus = DataManager.AddContentFromFile(fileStream.AsStream());
                    DataManager.CloseConnection();

                    fileStream.Dispose();

                    MessageBox.Show(updateStatus);
                }
            }

            updateDatabase = false;
        }
Example #21
        private async void button_addface_Click(object sender, RoutedEventArgs e)
        {
            //Disable all buttons while processing
            button_addface.IsEnabled  = false;
            button_addPhoto.IsEnabled = false;

            //Upload function
            var storageUri = await storageClient.uploadPhotoAsync(AppSettings.defaultPersonGroupID, Guid.NewGuid().ToString() + ".png", stream.AsStream());

            //If exception is returned, display error message
            if (storageUri is Exception)
            {
                var ex = storageUri as Exception;
                msg.Title   = "Unable to upload photo!";
                msg.Content = ex.Message;
                await msg.ShowAsync();
            }
            else if (storageUri as string != "")
            {
                //Add person face using Face API

                //Upload to Azure Blob returns a Uri reference to the image
                string Uri = storageUri.ToString();

                //Check if this face has already been registered with another user
                bool faceExist = await CheckFaceExistsAsync(Uri);

                //If the face has not been registered with another account then add face to the person
                if (!faceExist)
                {
                    //Get Person ID of the logged in user from local settings
                    var personId = localSettings.Values["PersonId"].ToString();

                    //Add Person Face to this Person
                    var result = await faceClient.CreatePersonFaceAsync(AppSettings.defaultPersonGroupID, personId, Uri);

                    if (result.IsSuccessStatusCode)
                    {
                        var responseContent = await result.Content.ReadAsStringAsync();

                        //Deserialize response to PersistedFace object
                        var persistedFace = JsonConvert.DeserializeObject <PersistedFace>(responseContent);

                        if (persistedFace != null)
                        {
                            msg.Title   = "Success!";
                            msg.Content = "Face registered successfully";

                            //Upload the registered image to Azure blob storage, with the persisted ID
                            await storageClient.uploadPhotoAsync(AppSettings.defaultPersonGroupID, persistedFace.persistedFaceId + ".png", stream.AsStream());

                            await msg.ShowAsync();

                            //Reset image control
                            PhotoControl.Source = null;

                            //Save user face details to face table in Azure
                            var res = await storageClient.AddUserFaceAsync(localSettings.Values["UserName"].ToString(), localSettings.Values["PersonId"].ToString(), Uri, persistedFace.persistedFaceId);
                        }
                    }
                    else
                    {
                        msg.Title   = "Unable to register face!";
                        msg.Content = await result.Content.ReadAsStringAsync();

                        await msg.ShowAsync();
                    }
                }
            }

            //Train Person Group after adding photo
            await faceClient.TrainPersonGroupAsync(AppSettings.defaultPersonGroupID);

            //Enable all buttons
            button_addface.IsEnabled  = true;
            button_addPhoto.IsEnabled = true;

            //Display last three photos in image control
            DisplayLatestPhotos();
        }
Exemple #22
        private async void btnCheckEmotion_Click(object sender, RoutedEventArgs e)
        {
            try
            {
                txtFinalEmotion.Text = "";
                // txtEmotionInfo.Text = "";

                //if( txtEmotionInfo.Text != null || txtFinalEmotion.Text != null )
                //{
                //    txtFinalEmotion.Text = "Please Click Another Photograph";
                //    txtEmotionInfo.Text = "";
                //    imgFace.Source = null;
                //    return;
                //}

                if (imageStream == null)
                {
                    txtFinalEmotion.Text = "Please Click Another Photograph";
                    txtEmotionInfo.Text  = "";
                    imgFace.Source       = null;
                    return;
                }

                // Pass the raw image stream, since the API requires the image data as a stream
                emotionResult = await emotionServiceClient.RecognizeAsync(imageStream.AsStream());

                if (emotionResult != null)
                {
                    // emotionResult contains the detected faces and their emotion scores.
                    // Only one face is needed here, so use emotionResult[0]; Scores holds the
                    // individual emotions (anger, sadness, happiness, and so on).

                    if (emotionResult.Length == 0)
                    {
                        txtFinalEmotion.Text      = "NO Emotions Returned :(";
                        btnCheckEmotion.IsEnabled = false;
                        return;
                    }

                    Scores        arrEmotion    = emotionResult[0].Scores;
                    StringBuilder stringBuilder = new StringBuilder(" Your Emotions Are : \n ");

                    stringBuilder.Append("Happiness : " + arrEmotion.Happiness + "\n");
                    stringBuilder.Append("Anger : " + arrEmotion.Anger + "\n");
                    stringBuilder.Append("Contempt : " + arrEmotion.Contempt + "\n");
                    stringBuilder.Append("Disgust : " + arrEmotion.Disgust + "\n");
                    stringBuilder.Append("Fear : " + arrEmotion.Fear + "\n");
                    stringBuilder.Append("Surprise : " + arrEmotion.Surprise + "\n");
                    stringBuilder.Append("Neutral : " + arrEmotion.Neutral + "\n");
                    stringBuilder.Append("Sadness : " + arrEmotion.Sadness + "\n");

                    txtEmotionInfo.Text = stringBuilder.ToString();

                    StringBuilder stringBuilder2 = new StringBuilder("The Emotion you are feeling is : ");

                    if (arrEmotion.Happiness >= 0.4)
                    {
                        stringBuilder2.Append(" Happy ");
                    }
                    if (arrEmotion.Anger >= 0.4)
                    {
                        stringBuilder2.Append(" Angry ");
                    }
                    if (arrEmotion.Contempt >= 0.4)
                    {
                        stringBuilder2.Append(" Contempt ");
                    }
                    if (arrEmotion.Disgust >= 0.4)
                    {
                        stringBuilder2.Append(" Disgust ");
                    }
                    if (arrEmotion.Fear >= 0.4)
                    {
                        stringBuilder2.Append(" Scared ");
                    }
                    if (arrEmotion.Surprise >= 0.4)
                    {
                        stringBuilder2.Append(" Surprised ");
                    }
                    if (arrEmotion.Neutral >= 0.4)
                    {
                        stringBuilder2.Append(" Neutral ");
                    }
                    if (arrEmotion.Sadness >= 0.4)
                    {
                        stringBuilder2.Append(" Sad ");
                    }

                    txtFinalEmotion.Text = stringBuilder2.ToString();
                    // imgFace.Source = null;
                    imageStream = null;
                    photo       = null;
                    btnCheckEmotion.IsEnabled = false;
                }
            }
            catch (HttpRequestException)
            {
                txtFinalEmotion.Text = "Issue with Establishing a Connection with the API";
                txtEmotionInfo.Text  = "Error Returning the Emotion";
            }
            catch (Exception)
            {
                txtEmotionInfo.Text = "Error Returning the Emotion";
            }
        }
        /// <summary>
        /// Reads a wav file.
        /// </summary>
        /// <param name="filename">Name of the wav file in the app's LocalFolder.</param>
        private async Task <wav> WaveAccess(string filename)
        {
            try
            {
                byte[] riff       = new byte[4];
                byte[] riffSize   = new byte[4];
                byte[] waveID     = new byte[4];
                byte[] junkID     = new byte[4];
                bool   hasjunk    = false;
                byte[] junklength = new byte[4];

                byte[] fmtID           = new byte[4];
                byte[] cksize          = new byte[4];
                uint   waveType        = 0;
                byte[] channel         = new byte[2];
                byte[] sample_rate     = new byte[4];
                byte[] bytespersec     = new byte[4];
                byte[] blocklen_sample = new byte[2];
                byte[] bitNum          = new byte[2];
                byte[] unknown         = new byte[2];
                byte[] dataID          = new byte[4]; //52
                byte[] dataLength      = new byte[4]; //56 bytes

                //string longFileName = filepath;

                //FileStream fstream = new FileStream(filename, FileMode.Open);
                //Windows.Storage.StorageFolder s = Windows.ApplicationModel.Package.Current.InstalledLocation;
                //FileStream fs;

                //StorageFolder storageFolder = Package.Current.InstalledLocation;
                //StorageFolder storageFolder = ApplicationData.Current.LocalFolder;

                //StorageFile storageFile = await storageFolder.GetFileAsync(filename);

                //IRandomAccessStream fileStream = await storageFile.OpenAsync(FileAccessMode.Read);
                //Stream s = fileStream.AsStream();
                //Stream s = fs.ReadAsync

                StorageFolder storageFolder = ApplicationData.Current.LocalFolder;

                StorageFile storageFile = await storageFolder.GetFileAsync(filename);

                IRandomAccessStream fileStream = await storageFile.OpenAsync(FileAccessMode.Read);

                Stream s = fileStream.AsStream();



                BinaryReader bread = new BinaryReader(s);
                //BinaryReader bread = new BinaryReader(fs);
                riff = bread.ReadBytes(4); // RIFF



                if (BitConverter.ToUInt32(bytesReserve(riff), 0) != 0x52494646)
                {
                    Exception e = new Exception("该文件不是WAVE文件");
                    throw e;
                }

                riffSize = bread.ReadBytes(4); // remaining file length

                if (BitConverter.ToUInt32(riffSize, 0) != bread.BaseStream.Length - bread.BaseStream.Position)
                {
                    //Exception e = new Exception("The WAVE file is corrupted: length does not match the header");
                    //throw e;
                }

                waveID = bread.ReadBytes(4);

                if (BitConverter.ToUInt32(bytesReserve(waveID), 0) != 0x57415645)
                {
                    Exception e = new Exception("该文件不是WAVE文件");
                    throw e;
                }

                byte[] tmp = bread.ReadBytes(4);

                if (BitConverter.ToUInt32(bytesReserve(tmp), 0) == 0x4A554E4B)
                {
                    //this wav contains a junk chunk
                    junkID     = tmp;
                    hasjunk    = true;
                    junklength = bread.ReadBytes(4);
                    uint junklen = BitConverter.ToUInt32(junklength, 0);
                    //read and discard the unwanted junk chunk
                    bread.ReadBytes((int)junklen);

                    //read the fmt chunk marker
                    fmtID = bread.ReadBytes(4);
                }
                else if (BitConverter.ToUInt32(bytesReserve(tmp), 0) == 0x666D7420)
                {
                    fmtID = tmp;
                }
                else
                {
                    Exception e = new Exception("无法找到WAVE文件的junk和fmt标记");
                    throw e;
                }



                if (BitConverter.ToUInt32(bytesReserve(fmtID), 0) != 0x666D7420)
                {
                    //fmt chunk marker
                    Exception e = new Exception("无法找到WAVE文件fmt标记");
                    throw e;
                }

                cksize = bread.ReadBytes(4);
                uint p_data_start = BitConverter.ToUInt32(cksize, 0);
                int  p_wav_start  = (int)p_data_start + 8;

                waveType = bread.ReadUInt16();

                if (waveType != 1)
                {
                    // not PCM format; only PCM is supported for now
                    Exception e = new Exception("WAVE文件不是pcm格式,暂时不支持");
                    throw e;
                }

                //number of channels
                channel = bread.ReadBytes(2);

                //sample rate
                sample_rate = bread.ReadBytes(4);
                int fs = (int)BitConverter.ToUInt32(sample_rate, 0);

                //bytes per second
                bytespersec = bread.ReadBytes(4);

                //bytes per sample frame: 2 for mono, 4 for stereo
                blocklen_sample = bread.ReadBytes(2);

                //bits per sample per channel, 16 bit by default
                bitNum = bread.ReadBytes(2);

                tmp = bread.ReadBytes(2);
                //search for the "da" marker of the data chunk
                while (BitConverter.ToUInt16(bytesReserve(tmp), 0) != 0x6461)
                {
                    tmp = bread.ReadBytes(2);
                }
                tmp = bread.ReadBytes(2);

                if (BitConverter.ToUInt16(bytesReserve(tmp), 0) != 0x7461)
                {
                    //"ta" marker (second half of "data")
                    Exception e = new Exception("无法找到WAVE文件data标记");
                    throw e;
                }

                //length of the wav data in bytes
                uint DataSize = bread.ReadUInt32();
                //number of 16-bit samples
                long NumSamples = (long)DataSize / 2;

                if (NumSamples == 0)
                {
                    NumSamples = (bread.BaseStream.Length - bread.BaseStream.Position) / 2;
                }
                //if (BitConverter.ToUInt32(notDefinition, 0) == 18)
                //{
                //    unknown = bread.ReadBytes(2);
                //}
                //dataID = bread.ReadBytes(4);

                Int16[] data = new Int16[NumSamples];

                for (int i = 0; i < NumSamples; i++)
                {
                    //read a 2-byte signed integer
                    data[i] = bread.ReadInt16();
                }

                s.Dispose();
                //fstream.Close();
                //fstream.Dispose();
                bread.Dispose();

                wav wave = new wav();
                wave.wavs = data;
                wave.fs   = fs;
                return(wave);
            }
            catch (System.Exception)
            {
                //return null;
                throw;
            }
        }
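        // A minimal usage sketch (assumed caller, not part of the original source): read a PCM
        // wav file from the app's LocalFolder and inspect the parsed sample rate and sample count.
        // "recording.wav" is a hypothetical file name.
        private async Task DumpWaveInfoAsync()
        {
            wav wave = await WaveAccess("recording.wav");

            System.Diagnostics.Debug.WriteLine($"Sample rate: {wave.fs} Hz, samples: {wave.wavs.Length}");
        }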
Exemple #24
        private async void getEmotion_Click(object sender, RoutedEventArgs e)
        {
            try
            {
                emotionResult = await emotionServiceClient.RecognizeAsync(imageStream.AsStream());

                if (emotionResult != null)
                {
                    Scores score = emotionResult[0].Scores;
                    output.Text = "Your emotions are: \n" +
                                  "Happiness: " + score.Happiness + "\n" +
                                  "Sadness: " + score.Sadness + "\n" +
                                  "Surprise: " + score.Surprise + "\n" +
                                  "Fear: " + score.Fear + "\n" +
                                  "Anger:" + score.Anger + "\n" +
                                  "Contempt:" + score.Contempt + "\n" +
                                  "Disgust:" + score.Disgust + "\n" +
                                  "Neutral:" + score.Neutral + "\n";
                    if (score.Happiness > score.Sadness && score.Happiness > score.Surprise && score.Happiness > score.Fear && score.Happiness > score.Anger && score.Happiness > score.Contempt && score.Happiness > score.Disgust &&
                        score.Happiness > score.Neutral)
                    {
                        SpeechSynthesizer     synthesizer     = new SpeechSynthesizer();
                        SpeechSynthesisStream SynthesisStream = await synthesizer.SynthesizeTextToStreamAsync("You are Happy");


                        //set source and start playing in syn audio stream.
                        media.AutoPlay = true;
                        media.SetSource(SynthesisStream, SynthesisStream.ContentType);
                        media.Play();
                        media1.Play();
                    }
                    else if (score.Sadness > score.Happiness && score.Sadness > score.Surprise && score.Sadness > score.Fear && score.Sadness > score.Anger && score.Sadness > score.Contempt && score.Sadness > score.Disgust &&
                             score.Sadness > score.Neutral)
                    {
                        SpeechSynthesizer     synthesizer     = new SpeechSynthesizer();
                        SpeechSynthesisStream SynthesisStream = await synthesizer.SynthesizeTextToStreamAsync("You are Sad");


                        //set source and start playing in syn audio stream.
                        media.AutoPlay = true;
                        media.SetSource(SynthesisStream, SynthesisStream.ContentType);
                        media.Play();
                        media2.Play();
                    }
                    else if (score.Surprise > score.Happiness && score.Surprise > score.Sadness && score.Surprise > score.Fear && score.Surprise > score.Anger && score.Surprise > score.Contempt && score.Surprise > score.Disgust &&
                             score.Surprise > score.Neutral)
                    {
                        SpeechSynthesizer     synthesizer     = new SpeechSynthesizer();
                        SpeechSynthesisStream SynthesisStream = await synthesizer.SynthesizeTextToStreamAsync("You are Surprised");


                        //set source and start playing in syn audio stream.
                        media.AutoPlay = true;
                        media.SetSource(SynthesisStream, SynthesisStream.ContentType);
                        media.Play();
                        media4.Play();
                    }
                    else if (score.Fear > score.Happiness && score.Fear > score.Sadness && score.Fear > score.Surprise && score.Fear > score.Anger && score.Fear > score.Contempt && score.Fear > score.Disgust &&
                             score.Fear > score.Neutral)
                    {
                        SpeechSynthesizer     synthesizer     = new SpeechSynthesizer();
                        SpeechSynthesisStream SynthesisStream = await synthesizer.SynthesizeTextToStreamAsync("You are Frightened");


                        //set source and start playing in syn audio stream.
                        media.AutoPlay = true;
                        media.SetSource(SynthesisStream, SynthesisStream.ContentType);
                        media.Play();
                        media5.Play();
                    }

                    else if (score.Anger > score.Happiness && score.Anger > score.Sadness && score.Anger > score.Fear && score.Anger > score.Surprise && score.Anger > score.Contempt && score.Anger > score.Disgust &&
                             score.Anger > score.Neutral)
                    {
                        SpeechSynthesizer     synthesizer     = new SpeechSynthesizer();
                        SpeechSynthesisStream SynthesisStream = await synthesizer.SynthesizeTextToStreamAsync("You are Angry");


                        //set source and start playing in syn audio stream.
                        media.AutoPlay = true;
                        media.SetSource(SynthesisStream, SynthesisStream.ContentType);
                        media.Play();
                    }
                    else if (score.Contempt > score.Happiness && score.Contempt > score.Sadness && score.Contempt > score.Fear && score.Contempt > score.Anger && score.Contempt > score.Surprise && score.Contempt > score.Disgust &&
                             score.Contempt > score.Neutral)
                    {
                        SpeechSynthesizer     synthesizer     = new SpeechSynthesizer();
                        SpeechSynthesisStream SynthesisStream = await synthesizer.SynthesizeTextToStreamAsync("You are having a feeling of contempt");


                        //set source and start playing in syn audio stream.
                        media.AutoPlay = true;
                        media.SetSource(SynthesisStream, SynthesisStream.ContentType);
                        media.Play();
                    }

                    else if (score.Disgust > score.Happiness && score.Disgust > score.Sadness && score.Disgust > score.Fear && score.Disgust > score.Anger && score.Disgust > score.Contempt && score.Disgust > score.Surprise &&
                             score.Disgust > score.Neutral)
                    {
                        SpeechSynthesizer     synthesizer     = new SpeechSynthesizer();
                        SpeechSynthesisStream SynthesisStream = await synthesizer.SynthesizeTextToStreamAsync("You are feeling disgusted");


                        //set source and start playing in syn audio stream.
                        media.AutoPlay = true;
                        media.SetSource(SynthesisStream, SynthesisStream.ContentType);
                        media.Play();
                    }

                    else
                    {
                        SpeechSynthesizer     synthesizer     = new SpeechSynthesizer();
                        SpeechSynthesisStream SynthesisStream = await synthesizer.SynthesizeTextToStreamAsync("You are feeling neutral");


                        //set source and start playing in syn audio stream.
                        media.AutoPlay = true;
                        media.SetSource(SynthesisStream, SynthesisStream.ContentType);
                        media.Play();
                        media3.Play();
                    }
                }
            }
            catch
            {
                SpeechSynthesizer     synthesizer     = new SpeechSynthesizer();
                SpeechSynthesisStream SynthesisStream = await synthesizer.SynthesizeTextToStreamAsync("There is an error. Please click the picture again");


                //set source and start playing in syn audio stream.
                media.AutoPlay = true;
                media.SetSource(SynthesisStream, SynthesisStream.ContentType);
                media.Play();
            }
        }
        public static async Task<bool> addImages(IRandomAccessStream filestream)
        {
            bool success = false;


            string serviceURL = "http://40.76.6.186:8888/member_register/shin";
            string boundary = "---------------------------" + DateTime.Now.Ticks.ToString("x");

            success = false;

            //Rest request
            HttpClient restClient = new HttpClient();
            restClient.BaseAddress = new Uri("http://40.76.6.186:8888/member_register/shin");
            restClient.DefaultRequestHeaders.TryAddWithoutValidation("Content-Type", "multipart/form-data; boundary=" + boundary);


            //authentication is still missing
            // setAuthorization(restClient, service, WEBSERVICE_REQUEST_TYPE_POST);

            // This is the postdata
            MultipartFormDataContent content = new MultipartFormDataContent(boundary);
            content.Add(new StringContent(boundary));
            StringContent textPart = new StringContent("1234", Encoding.UTF8);
            content.Add(textPart, "project");

            StreamContent imagePart = new StreamContent(filestream.AsStream());
            imagePart.Headers.Add("Content-Type", "image/jpeg");
            content.Add(imagePart, "profile_picture", "111");


            HttpRequestMessage req = new HttpRequestMessage(HttpMethod.Post, serviceURL);
            req.Content = content;
            HttpResponseMessage response = null;
            string responseBodyAsText = "";

            try
            {
                response = await restClient.SendAsync(req);
                response.EnsureSuccessStatusCode();
                responseBodyAsText = await response.Content.ReadAsStringAsync();
                if (response.StatusCode == HttpStatusCode.Created)
                {

                    success = true;
                }
            }
            catch (Exception e)
            {
                string err = e.Message;
            }


            return success;
        }
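        // A minimal usage sketch (assumed caller, not part of the original source): open a picked
        // image for reading and post it to the registration endpoint used by addImages above.
        private static async Task RegisterPhotoAsync(StorageFile file)
        {
            using (IRandomAccessStream stream = await file.OpenAsync(FileAccessMode.Read))
            {
                bool uploaded = await addImages(stream);

                System.Diagnostics.Debug.WriteLine("Upload succeeded: " + uploaded);
            }
        }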
 private async Task CreateTempFile()
 {
     tempFile = await ApplicationData.Current.TemporaryFolder.CreateFileAsync(Guid.NewGuid() + ".tmp", CreationCollisionOption.GenerateUniqueName);
     rtStream = await tempFile.OpenAsync(FileAccessMode.ReadWrite);
     var newStream = rtStream.AsStream();
     var position = currentStream.Position;
     currentStream.Position = 0;
     await currentStream.CopyToAsync(newStream);
     newStream.Position = position;
     currentStream.Dispose();
     currentStream = newStream;
 }
Exemple #27
        public Stream OpenWrite(string path)
        {
            IRandomAccessStream streamWithContentType = OpenWriteStream(path);

            return(streamWithContentType.AsStream());
        }
        private async void generate_Click(object sender, RoutedEventArgs e)
        {
            try
            {
                emotionResult = await emotionServiceClient.RecognizeAsync(imageStream.AsStream());

                if (emotionResult != null)
                {
                    string[] emo =
                    {
                        "Happy",
                        "Sad",
                        "Surprise",
                        "Fear",
                        "Anger",
                        "Contempt",
                        "Disgust",
                        "Neutral"
                    };



                    double[] anArray = { emotionResult[0].Scores.Happiness, emotionResult[0].Scores.Sadness, emotionResult[0].Scores.Surprise, emotionResult[0].Scores.Fear, emotionResult[0].Scores.Anger, emotionResult[0].Scores.Contempt, emotionResult[0].Scores.Disgust, emotionResult[0].Scores.Neutral };
                    // Finding max

                    double large = anArray.Max();

                    int p = Array.IndexOf(anArray, large);


                    confidencecheck.Text = "Highest : " + emo[p];
                    string myemo = emo[p];


                    if (myemo == emotion_u)
                    {
                        ar          += 20;
                        myscore.Text = ar.ToString();
                    }


                    /*List<GraphHR2> lst2 = new List<GraphHR2>();
                     * //fix this, shift heart rate and stop watch time
                     * lst2.Add(new GraphHR2 { Emotion = "Happiness", Values = (emotionResult[0].Scores.Happiness) * 100 });
                     * lst2.Add(new GraphHR2 { Emotion = "Sadness", Values = (emotionResult[0].Scores.Sadness) * 100 });
                     * lst2.Add(new GraphHR2 { Emotion = "Surprise", Values = (emotionResult[0].Scores.Surprise) * 100 });
                     * lst2.Add(new GraphHR2 { Emotion = "Fear", Values = (emotionResult[0].Scores.Fear) * 100 });
                     * lst2.Add(new GraphHR2 { Emotion = "Anger", Values = (emotionResult[0].Scores.Anger) * 100 });
                     * lst2.Add(new GraphHR2 { Emotion = "Contempt", Values = (emotionResult[0].Scores.Contempt) * 100 });
                     * lst2.Add(new GraphHR2 { Emotion = "Disgust", Values = (emotionResult[0].Scores.Disgust) * 100 });
                     * lst2.Add(new GraphHR2 { Emotion = "Neutral", Values = (emotionResult[0].Scores.Neutral) * 100 });
                     * (ColumnChart.Series[0] as ColumnSeries).ItemsSource = lst2;
                     * (PieChart2.Series[0] as PieSeries).ItemsSource = lst2;
                     *
                     *
                     *
                     *
                     * if (emo[p] == "Neutral")
                     * {
                     *  steps.Visibility = Visibility.Collapsed;
                     *
                     *  confidencecheck.Text = "These type of people are hard to deal with.\n Try to deal with them in a funny way.\nPass a joke and give it a try.";
                     *  Talk(confidencecheck.Text);
                     * }
                     * else if (emo[p] == "Surprise")
                     * {
                     *  steps.Visibility = Visibility.Visible;
                     *  image3.Visibility = Visibility.Collapsed;
                     *  steps.Text = "";
                     *  Talk(steps.Text);
                     * }
                     * else if (emo[p] == "Happiness")
                     * {
                     *  steps.Visibility = Visibility.Visible;
                     *  image3.Visibility = Visibility.Collapsed;
                     *  steps.Text = "";
                     *  Talk(steps.Text);
                     * }
                     * else if (emo[p] == "Fear")
                     * {
                     *  steps.Visibility = Visibility.Visible;
                     *  image3.Visibility = Visibility.Collapsed;
                     *  steps.Text = "";
                     *  Talk(steps.Text);
                     * }
                     * else if (emo[p] == "Angry")
                     * {
                     *  steps.Visibility = Visibility.Visible;
                     *  image3.Visibility = Visibility.Collapsed;
                     *  steps.Text = "";
                     *  Talk(steps.Text);
                     * }
                     * else if (emo[p] == "Sadness")
                     * {
                     *  steps.Visibility = Visibility.Visible;
                     *  image3.Visibility = Visibility.Collapsed;
                     *  steps.Text = "";
                     *  Talk(steps.Text);
                     * }
                     * else if (emo[p] == "Contempt")
                     * {
                     *  steps.Visibility = Visibility.Visible;
                     *  image3.Visibility = Visibility.Collapsed;
                     *  steps.Text = "";
                     *  Talk(steps.Text);
                     * }
                     * else if (emo[p] == "Disgust")
                     * {
                     *  steps.Visibility = Visibility.Visible;
                     *  image3.Visibility = Visibility.Collapsed;
                     *  steps.Text = "";
                     *  Talk(steps.Text);
                     * }*/
                }
            }
            catch
            {
                confidencecheck.Text = "Error returning the emotion";
            }
        }
        private async void OnSelectImageButtonClicked(object sender, RoutedEventArgs e)
        {
            var picker = new FileOpenPicker();

            picker.ViewMode = PickerViewMode.Thumbnail;
            picker.SuggestedStartLocation = PickerLocationId.PicturesLibrary;
            picker.FileTypeFilter.Add(".jpg");
            picker.FileTypeFilter.Add(".jpeg");
            picker.FileTypeFilter.Add(".png");

            var file = await picker.PickSingleFileAsync();

            if (file != null)
            {
                using (IRandomAccessStream stream = await file.OpenAsync(FileAccessMode.Read))
                {
                    // Delete existing face rectangles
                    DeleteFaceRectangles();

                    // Show the image
                    var image = new BitmapImage();
                    await image.SetSourceAsync(stream);

                    LoadedImage.Source = image;

                    Container.UpdateLayout();
                    var ratio = LoadedImage.ActualHeight / image.PixelHeight;

                    Progress.IsActive  = true;
                    Overlay.Visibility = Visibility.Visible;

                    try
                    {
                        // Submit the image to the Face API
                        FaceClient client = new FaceClient(
                            new ApiKeyServiceClientCredentials(_key),
                            new System.Net.Http.DelegatingHandler[] { }
                            );

                        client.Endpoint = _uri;

                        IList <FaceAttributeType> attributes = new FaceAttributeType[]
                        {
                            FaceAttributeType.Gender,
                            FaceAttributeType.Age,
                            FaceAttributeType.Emotion,
                            FaceAttributeType.Glasses,
                            FaceAttributeType.FacialHair
                        };

                        stream.Seek(0L);
                        var faces = await client.Face.DetectWithStreamAsync(stream.AsStream(), true, false, attributes);

                        Progress.IsActive  = false;
                        Overlay.Visibility = Visibility.Collapsed;

                        foreach (var face in faces)
                        {
                            // Highlight the face with a Rectangle
                            var rect = new Rectangle();
                            rect.Width  = face.FaceRectangle.Width * ratio;
                            rect.Height = face.FaceRectangle.Height * ratio;

                            var x = (face.FaceRectangle.Left * ratio) + ((Container.ActualWidth - LoadedImage.ActualWidth) / 2.0);
                            var y = (face.FaceRectangle.Top * ratio) + ((Container.ActualHeight - LoadedImage.ActualHeight) / 2.0);
                            rect.Margin = new Thickness(x, y, 0, 0);
                            rect.HorizontalAlignment = HorizontalAlignment.Left;
                            rect.VerticalAlignment   = VerticalAlignment.Top;

                            rect.Fill            = new SolidColorBrush(Colors.Transparent);
                            rect.Stroke          = new SolidColorBrush(Colors.Red);
                            rect.StrokeThickness = 2.0;
                            rect.Tag             = face.FaceId;

                            rect.PointerEntered += (s, args) =>
                            {
                                // Change the rectangle border to yellow when the pointer enters it
                                rect.Stroke = new SolidColorBrush(Colors.Yellow);
                            };

                            rect.PointerExited += (s, args) =>
                            {
                                // Change the rectangle border to red when the pointer exits it
                                rect.Stroke = new SolidColorBrush(Colors.Red);
                            };

                            rect.PointerPressed += async(s, args) =>
                            {
                                // Display information about a face when it is clicked
                                var id       = (Guid)((Rectangle)s).Tag;
                                var selected = faces.Where(f => f.FaceId == id).First();

                                var gender    = selected.FaceAttributes.Gender;
                                var age       = selected.FaceAttributes.Age;
                                var beard     = selected.FaceAttributes.FacialHair.Beard > 0.50 ? "Yes" : "No";
                                var moustache = selected.FaceAttributes.FacialHair.Moustache > 0.50 ? "Yes" : "No";
                                var glasses   = selected.FaceAttributes.Glasses;

                                // Use reflection to enumerate Emotion properties
                                var props = selected.FaceAttributes.Emotion.GetType()
                                            .GetProperties()
                                            .Where(pi => pi.PropertyType == typeof(double) && pi.GetGetMethod() != null)
                                            .Select(pi => new
                                {
                                    pi.Name,
                                    Value = (double)pi.GetGetMethod().Invoke(selected.FaceAttributes.Emotion, null)
                                });

                                // Determine the dominant emotion
                                var max     = props.Max(p => p.Value);
                                var emotion = props.Single(p => p.Value == max).Name;

                                // Show the results
                                var message = $"Gender: {gender}\nAge: {age}\nBeard: { beard}\nMoustache: {moustache}\nGlasses: {glasses}\nEmotion: {emotion}";
                                await new MessageDialog(message).ShowAsync();
                            };

                            Container.Children.Add(rect);
                        }
                    }
                    catch (Exception ex)
                    {
                        Progress.IsActive  = false;
                        Overlay.Visibility = Visibility.Collapsed;
                        await new MessageDialog(ex.Message).ShowAsync();
                    }
                    finally
                    {
                        Progress.IsActive  = false;
                        Overlay.Visibility = Visibility.Collapsed;
                    }
                }
            }
        }
Exemple #30
        public async void Open(string fileName)
        {
            var file = await KnownFolders.MusicLibrary.CreateFileAsync( fileName, CreationCollisionOption.ReplaceExisting );
            stream = await file.OpenAsync( FileAccessMode.ReadWrite );

            binaryWriter = new BinaryWriter( stream.AsStream() );

            dataSize = 0;
            WriteWaveHeader( 0 );

            disposed = false;
        }
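        // Hedged sketch only: the original WriteWaveHeader implementation is not shown here.
        // This is what such a helper typically writes for 16-bit PCM, the standard 44-byte
        // RIFF/WAVE header, with the data length patched in once recording has finished.
        // The sample rate and channel count below are assumptions, not values from the source.
        private void WriteWaveHeader(int dataLength)
        {
            const int   sampleRate    = 44100; // assumed
            const short channels      = 1;     // assumed mono
            const short bitsPerSample = 16;

            binaryWriter.Seek(0, SeekOrigin.Begin);
            binaryWriter.Write(System.Text.Encoding.ASCII.GetBytes("RIFF"));
            binaryWriter.Write(36 + dataLength);                            // remaining chunk size
            binaryWriter.Write(System.Text.Encoding.ASCII.GetBytes("WAVE"));
            binaryWriter.Write(System.Text.Encoding.ASCII.GetBytes("fmt "));
            binaryWriter.Write(16);                                         // fmt chunk size
            binaryWriter.Write((short)1);                                   // PCM
            binaryWriter.Write(channels);
            binaryWriter.Write(sampleRate);
            binaryWriter.Write(sampleRate * channels * bitsPerSample / 8);  // bytes per second
            binaryWriter.Write((short)(channels * bitsPerSample / 8));      // block align
            binaryWriter.Write(bitsPerSample);
            binaryWriter.Write(System.Text.Encoding.ASCII.GetBytes("data"));
            binaryWriter.Write(dataLength);
        }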
Exemple #31
 public static async Task <ImageFormat> DetectImageFormat(this IRandomAccessStream randomAccessStream)
 {
     await using var stream = randomAccessStream.AsStream();
     using var image        = Image.FromStream(stream);
     return(image.RawFormat);
 }
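 // A minimal usage sketch (assumed caller, not part of the original source): detect the format
 // of an opened image stream and check whether it is a PNG.
 public static async Task<bool> IsPngAsync(IRandomAccessStream fileStream)
 {
     ImageFormat format = await fileStream.DetectImageFormat();

     return ImageFormat.Png.Equals(format);
 }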
Exemple #32
        private async void getEmotion_Click(object sender, RoutedEventArgs e)
        {
            try
            {
                emotionResults = await emotionService.RecognizeAsync(imageStream.AsStream());

                faceRects = await UploadAndDetectFaces(photo.Path, photo);

                //Draw a rectangle at each detected face position
                var property = await photo.Properties.GetImagePropertiesAsync();

                var writeableBmp = BitmapFactory.New((int)property.Width, (int)property.Height);

                using (writeableBmp.GetBitmapContext())
                {
                    //Load bitmap from image file
                    using (var fileStream = await photo.OpenAsync(FileAccessMode.Read))
                    {
                        writeableBmp = await BitmapFactory.New(1, 1).FromStream(fileStream, BitmapPixelFormat.Bgra8);
                    }
                }
                if (faceRects != null && faceRects.Length > 0)
                {
                    foreach (var face in faceRects)
                    {
                        writeableBmp.DrawRectangle(face.Left, face.Top,
                                                   face.Left + face.Width,
                                                   face.Top + face.Height, Colors.Yellow);
                    }
                    debug.Text = "Rostos identificados: " + faceRects.Length;
                }
                //Overwrite the image with a new one containing the markings
                image.Source = writeableBmp;

                output.Text = "";
                if (emotionResults != null)
                {
                    int i = 0;
                    foreach (var p in emotionResults)
                    {
                        var score = p.Scores;
                        output.Text += "Your Emotions are for photo #" + i + "  : \n" +

                                       "Feliz: " + String.Format("{0:0.##}", score.Happiness * 100) + " %" + "\n" +

                                       "Tristeza: " + String.Format("{0:0.##}", score.Sadness * 100) + " %" + "\n" +

                                       "Surpreso: " + String.Format("{0:0.##}", score.Surprise * 100) + " %" + "\n" +

                                       "Raiva: " + String.Format("{0:0.##}", score.Anger * 100) + " %" + "\n" +

                                       "Desprezo: " + String.Format("{0:0.##}", score.Contempt * 100) + " %" + "\n" +

                                       "Desgosto: " + String.Format("{0:0.##}", score.Disgust * 100) + " %" + "\n" +

                                       "Medo: " + String.Format("{0:0.##}", score.Fear * 100) + " %" + "\n" +

                                       "Neutro: " + String.Format("{0:0.##}", score.Neutral * 100) + " %" + "\n";
                        i++;
                    }
                }
            }
            catch (Exception ex)
            {
                output.Text = "Erro: Check Emotions \n" + ex.Message + "\n";
            }
        }
Exemple #33
        async Task finishEnrollment()
        {
            if (btnRecordEnroll.IsEnabled == false)
            {
                return;                                     // if user clicks and then comes timer event
            }
            btnRecordEnroll.Content   = "Start record enrollment";
            btnRecordEnroll.IsEnabled = false;
            await CaptureMedia.StopRecordAsync();

            Stream str = AudioStream.AsStream();

            str.Seek(0, SeekOrigin.Begin);


            _speakerId = Guid.Parse((lbProfiles.SelectedItem as ListBoxItem).Content.ToString());

            OperationLocation processPollingLocation;

            try
            {
                processPollingLocation = await _serviceClient.EnrollAsync(str, _speakerId);
            }
            catch (EnrollmentException vx)
            {
                txtInfo.Text = vx.Message;
                CleanAfter();
                return;
            }
            catch (Exception vx)
            {
                txtInfo.Text = vx.Message;
                CleanAfter();
                return;
            }


            EnrollmentOperation enrollmentResult = null;
            int      numOfRetries       = 10;
            TimeSpan timeBetweenRetries = TimeSpan.FromSeconds(5.0);

            while (numOfRetries > 0)
            {
                await Task.Delay(timeBetweenRetries);

                enrollmentResult = await _serviceClient.CheckEnrollmentStatusAsync(processPollingLocation);

                if (enrollmentResult.Status == Status.Succeeded)
                {
                    break;
                }
                else if (enrollmentResult.Status == Status.Failed)
                {
                    txtInfo.Text = enrollmentResult.Message;
                    CleanAfter();
                    return;
                }
                numOfRetries--;
            }

            if (numOfRetries <= 0)
            {
                txtInfo.Text = "Identification operation timeout";
            }
            else
            {
                txtInfo.Text = "Enrollment done. " + enrollmentResult.Status + Environment.NewLine + " Remaining Speech Time " + enrollmentResult.ProcessingResult.RemainingEnrollmentSpeechTime;
            }

            CleanAfter();
        }
Exemple #34
        private async void takePhoto_Click(object sender, RoutedEventArgs e)
        {
            // camera code to handle capture, storage and display of photo
            try
            {
                photo = await captureUI.CaptureFileAsync(CameraCaptureUIMode.Photo);

                if (photo == null)
                {
                    return;
                }
                else
                {
                    imageStream = await photo.OpenAsync(FileAccessMode.Read);

                    BitmapDecoder decoder = await BitmapDecoder.CreateAsync(imageStream);

                    SoftwareBitmap softwareBitmap = await decoder.GetSoftwareBitmapAsync();

                    SoftwareBitmap       softwareBitmapBGR8 = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                    SoftwareBitmapSource bitmapSource       = new SoftwareBitmapSource();
                    await bitmapSource.SetBitmapAsync(softwareBitmapBGR8);

                    image.Source = bitmapSource;
                }
            }
            catch (Exception)
            {
                textBlock.Text = "Error taking the picture";
                //throw;
            }

            // emotion code, grabs elements returned from emotionservice and displays results
            try
            {
                emotionResult = await emotionServiceClient.RecognizeAsync(imageStream.AsStream());

                if (emotionResult != null)
                {
                    EmotionScores scores = emotionResult[0].Scores;
                    textBlock.Text = "According to a robot, your emotions are: \n\n" +
                                     "Happiness: " + scores.Happiness + "\n" +
                                     "Sadness: " + scores.Sadness + "\n" +
                                     "Fear: " + scores.Fear + "\n" +
                                     "Neutral: " + scores.Neutral;

                    if (scores.Neutral > 0.5)
                    {
                        textBlock.Text += "\n\nRobot says: \"Ah, I see we are more similar than I had thought, human.\"";
                    }
                    else
                    {
                        textBlock.Text += "\n\nRobot says: \"Your emotions will be your undoing, foolish bag of mostly water.\"";
                    }
                }
            }
            catch (Exception)
            {
                textBlock.Text = "Error! Emotions not found. Bleep bloop.";
                //throw;
            }
        }
Exemple #35
        private async void button_Click(object sender, RoutedEventArgs e)
        {
            int   i, t;
            float min;

            try
            {
                photo = await captureUI.CaptureFileAsync(CameraCaptureUIMode.Photo);

                if (photo == null)
                {
                    return;
                }
                else
                {
                    imageStream = await photo.OpenAsync(FileAccessMode.Read);

                    emotionResult = await emotionServiceClient.RecognizeAsync(imageStream.AsStream());

                    if (emotionResult != null)
                    {
                        Scores  score = emotionResult[0].Scores;
                        float[] a     = new float[7] {
                            score.Happiness, score.Sadness, score.Fear, score.Anger, score.Contempt, score.Disgust, score.Neutral
                        };
                        t   = 0;
                        min = 0;
                        i   = 0;
                        while (i < 7)
                        {
                            if (a[i] > min)
                            {
                                min = a[i];
                                t   = i;
                            }
                            i++;
                        }
                        switch (t)
                        {
                        case 0:
                            BitmapImage happiness =
                                new BitmapImage(new Uri("ms-appx:///Assets/happiness.jpg"));
                            image.Source = happiness;
                            break;

                        case 1:
                            BitmapImage sadness =
                                new BitmapImage(new Uri("ms-appx:///Assets/sadness.jpg"));
                            image.Source = sadness;
                            break;

                        case 2:
                            BitmapImage fear =
                                new BitmapImage(new Uri("ms-appx:///Assets/fear.jpg"));
                            image.Source = fear;
                            break;

                        case 3:
                            BitmapImage anger =
                                new BitmapImage(new Uri("ms-appx:///Assets/angry.jpg"));
                            image.Source = anger;
                            break;

                        case 4:
                            BitmapImage contempt =
                                new BitmapImage(new Uri("ms-appx:///Assets/contempt.jpg"));
                            image.Source = contempt;
                            break;

                        case 5:
                            BitmapImage disgust =
                                new BitmapImage(new Uri("ms-appx:///Assets/disgust.jpg"));
                            image.Source = disgust;
                            break;

                        case 6:
                            BitmapImage neutral =
                                new BitmapImage(new Uri("ms-appx:///Assets/neutral.png"));
                            image.Source = neutral;
                            break;
                        }
                    }
                }
            }
            catch
            {
                BitmapImage error = new BitmapImage(new Uri("ms-appx:///Assets/error.jpg"));
                image.Source = error;
            }
        }
Exemple #36
        private async void Button_Click(object sender, RoutedEventArgs e)
        {
            ItemsNuevos.Clear();
            cnvCanvasSelected.Children.Clear();
            Items.Clear();
            cnvCanvas.Children.Clear();
            LoadingControl.IsLoading = true;
            resultadosDistancia.Clear();

            Windows.Storage.Pickers.FileOpenPicker openPicker = new Windows.Storage.Pickers.FileOpenPicker();
            openPicker.SuggestedStartLocation = Windows.Storage.Pickers.PickerLocationId.Desktop;
            openPicker.ViewMode = Windows.Storage.Pickers.PickerViewMode.Thumbnail;
            openPicker.FileTypeFilter.Clear();
            openPicker.FileTypeFilter.Add(".bmp");
            openPicker.FileTypeFilter.Add(".png");
            openPicker.FileTypeFilter.Add(".jpeg");
            openPicker.FileTypeFilter.Add(".jpg");
            storageFile = await openPicker.PickSingleFileAsync();


            if (storageFile != null)
            {
                var stream = await storageFile.OpenAsync(FileAccessMode.Read);

                randomAccessStreamImage = stream;
                Stream streamImage = randomAccessStreamImage.AsStream();
                byte[] b;

                using (BinaryReader br = new BinaryReader(streamImage))
                {
                    b = br.ReadBytes((int)streamImage.Length);
                }
                byte[] byteArray;
                using (Windows.Storage.Streams.IRandomAccessStream fileStream = await storageFile.OpenAsync(Windows.Storage.FileAccessMode.Read))
                {
                    Windows.UI.Xaml.Media.Imaging.BitmapImage bitmapImage = new Windows.UI.Xaml.Media.Imaging.BitmapImage();
                    bitmapImage.SetSource(fileStream);
                    width  = bitmapImage.PixelWidth;
                    heigth = bitmapImage.PixelHeight;
                    if (width > 400)
                    {
                        constante = 1 / (width / 600);

                        width  = width * constante;
                        heigth = heigth * constante;
                    }
                    byteArray = new byte[fileStream.AsStream().Length];



                    cnvCanvas.Width          = (int)width;
                    cnvCanvas.Height         = (int)heigth;
                    cnvCanvasSelected.Width  = (int)width;
                    cnvCanvasSelected.Height = (int)heigth;

                    imagePreview.Width  = (int)width;
                    imagePreview.Height = (int)heigth;
                    imagePreview.Source = bitmapImage;

                    imagePreview.Opacity = 0.4;


                    vsproject = vs.Last().ToString();


                    try
                    {
                        var client = new HttpClient();
                        client.DefaultRequestHeaders.Add("Prediction-Key", localSettings.Values["apiKeyCV"] as string);
                        ByteArrayContent bytearrayContent = new ByteArrayContent(b);
                        bytearrayContent.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/octet-stream");
                        var projecto       = vsproject;
                        var byteArrayNuevo = bytearrayContent;
                        var result         = await client.PostAsync("https://southcentralus.api.cognitive.microsoft.com/customvision/v2.0/Prediction/" + vsproject + "/image", bytearrayContent);

                        if (result.IsSuccessStatusCode)
                        {
                            var response = result.Content.ReadAsStringAsync().Result;

                            dynamic obj                 = JsonConvert.DeserializeObject <dynamic>(response);
                            int     contador            = 0;
                            var     dynamicImagePredict = obj.id;
                            idPredictedImage = (string)dynamicImagePredict;

                            foreach (var item in obj.predictions)
                            {
                                var resultadoCaja = item.boundingBox;
                                var resultadoTag  = item.tagName;
                                if (item.probability * 100 >= 75)
                                {
                                    imagePreview.Opacity    = 1;
                                    imagePreview.Visibility = Visibility.Visible;
                                    dibujarCaja(resultadoCaja, resultadoTag, item.probability);


                                    LoadingControl.IsLoading = false;

                                    contador += 1;
                                }
                            }
                            txtResult.Text = "Se encontraron: " + contador.ToString() + " elementos";

                            SolidColorBrush solidColorBrushSalud = new SolidColorBrush(Windows.UI.Colors.Green);
                            txtResultSalud.Foreground = solidColorBrushSalud;

                            txtResultSalud.Text = "La salud del cultivo es OPTIMA";
                        }
                    }
                    catch (Exception ex)
                    {
                        var error = ex.Message.ToString();
                        throw;
                    }
                }
            }
        }
        /// <summary>
        /// Load the family and their notes from local storage
        /// </summary>
        /// <returns>Null if there was no model to load, otherwise, the deserialized model</returns>
        private async Task <Model> LoadModelAsync()
        {
            Model model = null;

            InkStrokeContainer combinedStrokes     = new InkStrokeContainer(); // To avoid managing individual files for every InkCanvas, we will combine all ink stroke information into one container
            List <int>         InkStrokesPerCanvas = new List <int>();

            try
            {
                StorageFile modelDataFile = await ApplicationData.Current.LocalFolder.GetFileAsync(NOTES_MODEL_FILE);

                using (IRandomAccessStream randomAccessStream = await modelDataFile.OpenAsync(FileAccessMode.ReadWrite))
                {
                    // Load the model which contains the people and the note collection
                    try
                    {
                        DataContractJsonSerializer modelSerializer = new DataContractJsonSerializer(typeof(Model));
                        model = (Model)modelSerializer.ReadObject(randomAccessStream.AsStream());
                    }
                    catch (System.Runtime.Serialization.SerializationException)
                    {
                        System.Diagnostics.Debug.Assert(false, "Failed to load serialized model");
                        return(null);
                    }
                }

                // For each sticky note, load the number of inkstrokes it contains
                StorageFile inkDataFile = await ApplicationData.Current.LocalFolder.GetFileAsync(NOTES_INK_FILE);

                using (IInputStream inkStream = await inkDataFile.OpenSequentialReadAsync())
                {
                    bool       combinedStrokesExist = false;
                    DataReader reader = new DataReader(inkStream);
                    foreach (StickyNote n in model.StickyNotes)
                    {
                        await reader.LoadAsync(sizeof(int)); // You need to buffer the data before you can read from a DataReader.

                        int numberOfInkStrokes = reader.ReadInt32();
                        InkStrokesPerCanvas.Add(numberOfInkStrokes);
                        combinedStrokesExist |= numberOfInkStrokes > 0;
                    }

                    // Load the ink data
                    if (combinedStrokesExist)
                    {
                        await combinedStrokes.LoadAsync(inkStream);
                    }
                } // using inkStream
            }     // try
            catch (FileNotFoundException)
            {
                // No data to load. We'll start with a fresh model
                return(null);
            }

            // Factor out the inkstrokes from the big container into each note
            int allStrokesIndex = 0, noteIndex = 0;
            IReadOnlyList <InkStroke> allStrokes = combinedStrokes.GetStrokes();

            foreach (StickyNote n in model.StickyNotes)
            {
                // InkStrokeContainers can't be serialized using the default xml/json serialization.
                // So create a new one and fill it up from the data we restored
                n.Ink = new InkStrokeContainer();
                // pull out the ink strokes that belong to this note
                for (int i = 0; i < InkStrokesPerCanvas[noteIndex]; i++)
                {
                    n.Ink.AddStroke(allStrokes[allStrokesIndex++].Clone());
                }
                ++noteIndex;
            }

            return(model);
        }
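        // A minimal usage sketch (assumed caller, not part of the original source): restore the
        // saved model at startup and fall back to a fresh one when nothing has been persisted yet.
        // This assumes Model has a parameterless constructor.
        private async Task InitializeModelAsync()
        {
            Model model = await LoadModelAsync() ?? new Model();

            // ...use model to populate the sticky note UI
        }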
Exemple #38
        public async Task <Stream> ConvertToStream(StorageFile photoFile)
        {
            IRandomAccessStream stream = await photoFile.OpenAsync(FileAccessMode.Read);

            return(stream.AsStream());
        }
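        // A minimal usage sketch (assumed caller, not part of the original source). Disposing the
        // returned Stream also releases the underlying IRandomAccessStream wrapped by AsStream(),
        // so the file handle is freed when the using block ends. 'destination' is a hypothetical
        // target stream supplied by the caller.
        public async Task CopyPhotoAsync(StorageFile photoFile, Stream destination)
        {
            using (Stream source = await ConvertToStream(photoFile))
            {
                await source.CopyToAsync(destination);
            }
        }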