private async void AnalysisOnce(string filename)
        {
            //if (!CameraList.HasItems) {
            //    MessageArea.Text = "No cameras found; cannot start processing";
            //    return;
            //}

            // Clean leading/trailing spaces in API keys.
            Properties.Settings.Default.FaceAPIKey   = Properties.Settings.Default.FaceAPIKey.Trim();
            Properties.Settings.Default.VisionAPIKey = Properties.Settings.Default.VisionAPIKey.Trim();

            // Create API clients.
            _visionClient = new VisionAPI.VisionServiceClient(Properties.Settings.Default.VisionAPIKey, Properties.Settings.Default.VisionAPIHost);

            // How often to analyze.
            _grabber.TriggerAnalysisOnInterval(Properties.Settings.Default.AnalysisInterval);

            // Reset message.
            MessageArea.Text = "";

            // Record start time, for auto-stop
            _startTime = DateTime.Now;

            await _grabber.StartProcessingCameraAsync(filename);

            // Let the grabber run for three seconds without blocking the UI thread.
            await System.Threading.Tasks.Task.Delay(3000);

            await _grabber.StopProcessingAsync();
        }
        private async void StartButton_Click(object sender, RoutedEventArgs e)
        {
            if (!CameraList.HasItems)
            {
                MessageArea.Text = "No cameras found; cannot start processing";
                return;
            }

            // Clean leading/trailing spaces in API keys.
            Properties.Settings.Default.FaceAPIKey   = Properties.Settings.Default.FaceAPIKey.Trim();
            Properties.Settings.Default.VisionAPIKey = Properties.Settings.Default.VisionAPIKey.Trim();

            // Create API clients.
            //_faceClient = new FaceAPI.FaceServiceClient(Properties.Settings.Default.FaceAPIKey, Properties.Settings.Default.FaceAPIHost);
            //_visionClient = new VisionAPI.VisionServiceClient(Properties.Settings.Default.VisionAPIKey, Properties.Settings.Default.VisionAPIHost);

            _faceClient   = new FaceAPI.FaceServiceClient("3dc9cb73ed524c56a50c77fd4d0f2213", "https://westcentralus.api.cognitive.microsoft.com/face/v1.0");
            _visionClient = new VisionAPI.VisionServiceClient("04429ac7e3dd4adfb9afc09be3296177", "https://westcentralus.api.cognitive.microsoft.com/vision/v1.0");

            // How often to analyze.
            //_grabber.TriggerAnalysisOnInterval(Properties.Settings.Default.AnalysisInterval);
            TimeSpan interval = TimeSpan.FromSeconds(1);

            _grabber.TriggerAnalysisOnInterval(interval);

            // Reset message.
            MessageArea.Text = "";

            // Record start time, for auto-stop
            _startTime = DateTime.Now;

            await _grabber.StartProcessingCameraAsync(CameraList.SelectedIndex);
        }
Example No. 3
        private async void StartButton_Click(object sender, RoutedEventArgs e)
        {
            // Clean leading/trailing spaces in API keys.
            Properties.Settings.Default.FaceAPIKey   = Properties.Settings.Default.FaceAPIKey.Trim();
            Properties.Settings.Default.VisionAPIKey = Properties.Settings.Default.VisionAPIKey.Trim();

            // Create API clients.
            _faceClient   = new FaceAPI.FaceServiceClient(Properties.Settings.Default.FaceAPIKey, Properties.Settings.Default.FaceAPIHost);
            _visionClient = new VisionAPI.VisionServiceClient(Properties.Settings.Default.VisionAPIKey, Properties.Settings.Default.VisionAPIHost);

            // How often to analyze.
            _grabber.TriggerAnalysisOnInterval(Properties.Settings.Default.AnalysisInterval);


            if (subject_check is true && time_check is true)
            {
                // Reset message.
                MessageArea.Text = "";
                AttendeeList.Items.Clear();
                NoAttendeeList.Items.Clear();
                // Record start time, for auto-stop
                _startTime = DateTime.Now;
                DB_NoAttendeeList();
                // Run camera 0
                await _grabber.StartProcessingCameraAsync(0);

                //Webcam
                //await _grabber.StartProcessingCameraAsync(1);
            }
        }
Example No. 4
        private async void StartButton_Click(object sender, RoutedEventArgs e)
        {
            if (!CameraList.HasItems)
            {
                MessageArea.Text = "No cameras found; cannot start processing";
                return;
            }

            // Clean leading/trailing spaces in API keys.
            Properties.Settings.Default.FaceAPIKey   = Properties.Settings.Default.FaceAPIKey.Trim();
            Properties.Settings.Default.VisionAPIKey = Properties.Settings.Default.VisionAPIKey.Trim();

            // Create API clients.
            _faceClient   = new FaceAPI.FaceServiceClient(Properties.Settings.Default.FaceAPIKey, Properties.Settings.Default.FaceAPIHost);
            _visionClient = new VisionAPI.VisionServiceClient(Properties.Settings.Default.VisionAPIKey, Properties.Settings.Default.VisionAPIHost);

            // How often to analyze.
            _grabber.TriggerAnalysisOnInterval(Properties.Settings.Default.AnalysisInterval);

            // Reset message.
            MessageArea.Text = "";

            // Record start time, for auto-stop
            _startTime = DateTime.Now;

            await _grabber.StartProcessingCameraAsync(CameraList.SelectedIndex);
        }
Example No. 5
        public static async Task <string> AnalyzeAsync(byte[] image)
        {
            var visualFeatures = new VisualFeature[] { VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color, VisualFeature.Description, VisualFeature.Faces, VisualFeature.ImageType, VisualFeature.Tags };
            var vision         = new Microsoft.ProjectOxford.Vision.VisionServiceClient(SubscriptionKeys.ComputerVisionId, "https://westeurope.api.cognitive.microsoft.com/vision/v1.0");
            var result         = await vision.AnalyzeImageAsync(new MemoryStream(image) { Position = 0 }, visualFeatures);

            // Return the text of the highest-confidence caption, or null if there is none.
            return(result.Description.Captions.OrderByDescending(c => c.Confidence).Select(c => c.Text).FirstOrDefault());
        }
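        // Hedged usage sketch (an assumption, not part of the original sample): read an image
        // from disk and print the caption that AnalyzeAsync above returns. The file path and
        // the console output are illustrative only.
        public static async Task RunAnalyzeDemoAsync(string imagePath)
        {
            byte[] imageBytes = System.IO.File.ReadAllBytes(imagePath);
            string caption    = await AnalyzeAsync(imageBytes);

            Console.WriteLine(caption ?? "No caption returned");
        }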
        private void Button1_Click(object sender, EventArgs e)
        {
            OpenFileDialog d = new OpenFileDialog();

            if (d.ShowDialog() != DialogResult.OK)
            {
                return;
            }

            string filePath = d.FileName;
            //OCR results container
            OcrResults OcrResults = default(OcrResults);
            //Create the VisionServiceClient
            var visionClient = new Microsoft.ProjectOxford.Vision.VisionServiceClient(
                "<replace with your key>",
                "https://southeastasia.api.cognitive.microsoft.com/vision/v1.0");

            using (var fs = new FileStream(filePath, FileMode.Open))
            {
                //  this.textBox.Text = "Recognizing...";
                //Recognize the text (language auto-detected)
                //  this.textBox.Text = "";
                OcrResults = visionClient.RecognizeTextAsync(fs, LanguageCodes.AutoDetect).Result;
            }

            string result = "";

            //Collect the recognition result from each region
            foreach (var Region in OcrResults.Regions)
            {
                //Each line in the region
                foreach (var line in Region.Lines)
                {
                    string aline = "";
                    //Each recognized word
                    foreach (var Word in line.Words)
                    {
                        //Append the recognized word
                        aline += Word.Text;
                    }

                    //Add a line break after each line
                    result += aline + "\n";
                }
            }

            this.TextBox1.Text = result;

            //load picture
            this.PictureBox1.Image = Image.FromFile(filePath);
        }
Example No. 7
        public async Task <string> Run(string base64EncodedImage)
        {
            var VisionClient = new Microsoft.ProjectOxford.Vision.VisionServiceClient(
                // NOTE: do not commit real subscription keys; a placeholder is shown here.
                "<your-vision-api-key>"
                );
            var response = await VisionClient.DescribeAsync(new MemoryStream(Convert.FromBase64String(base64EncodedImage)));

            // FirstOrDefault keeps the null-conditional meaningful when no caption is returned.
            return(response.Description.Captions.FirstOrDefault()?.Text);
        }
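        // Hedged usage sketch (an assumption, not part of the original sample): from inside the
        // same class, an image file could be read, base64-encoded, and handed to Run() above.
        // The file-path parameter is illustrative only.
        public async Task <string> DescribeFileAsync(string imagePath)
        {
            string base64EncodedImage = Convert.ToBase64String(System.IO.File.ReadAllBytes(imagePath));

            return await Run(base64EncodedImage);
        }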
Example No. 8
        private void button3_Click(object sender, EventArgs e)
        {
            var msg = "";

            //Read the image file into two memory streams
            byte[] file       = System.IO.File.ReadAllBytes(this.textBox1.Text);
            Stream MemStream1 = new MemoryStream(file);
            Stream MemStream2 = new MemoryStream(file);

            //For drawing on the image
            System.Drawing.Bitmap bmp = new System.Drawing.Bitmap(MemStream2);
            Graphics g = Graphics.FromImage(bmp);
            //ComputerVision instance
            var visionClient = new Microsoft.ProjectOxford.Vision.VisionServiceClient(
                ComputerVisionServiceKey, ComputerVisionServiceEndpoint);

            //For analysis
            using (MemStream1)
            {
                //Run OCR on the image
                var OcrResults = visionClient.RecognizeTextAsync(
                    MemStream1, LanguageCodes.AutoDetect).Result;

                //Collect the recognition result from each region
                foreach (var Region in OcrResults.Regions)
                {
                    //Each line in the region
                    foreach (var line in Region.Lines)
                    {
                        //Draw a red rectangle around the line
                        g.DrawRectangle(
                            new Pen(Brushes.Red, 3),
                            new System.Drawing.Rectangle(line.Rectangle.Left, line.Rectangle.Top,
                                                         line.Rectangle.Width, line.Rectangle.Height));

                        var aline = "";
                        //Each recognized word
                        foreach (var Word in line.Words)
                        {
                            //Append the recognized word
                            aline += Word.Text;
                        }

                        //Append the line followed by a separator
                        msg += aline + "  ,  ";
                    }
                }
            }
            this.textBox2.Text     = msg;
            this.pictureBox2.Image = bmp;
        }
        private async void StartButton_Click(object sender, RoutedEventArgs e)
        {
            if (!CameraList.HasItems)
            {
                MessageArea.Text = "No cameras found; cannot start processing";
                return;
            }

            // Clean leading/trailing spaces in API keys.
            Properties.Settings.Default.FaceAPIKey   = Properties.Settings.Default.FaceAPIKey.Trim();
            Properties.Settings.Default.VisionAPIKey = Properties.Settings.Default.VisionAPIKey.Trim();

            if (Uri.IsWellFormedUriString(faceEndpoint, UriKind.Absolute))
            {
                _faceClient.Endpoint = faceEndpoint;
                MessageArea.Text     = "Endpoint Initiated";
            }
            else
            {
                MessageBox.Show(faceEndpoint,
                                "Invalid URI", MessageBoxButton.OK, MessageBoxImage.Error);
                Environment.Exit(0);
            }

            // Create API clients.
            //_faceClient = new FaceAPI.FaceServiceClient(Properties.Settings.Default.FaceAPIKey, Properties.Settings.Default.FaceAPIHost);
            //_faceClient = new FaceClient(
            //new ApiKeyServiceClientCredentials("fa6122356e204afea55c8c590b3caa82"),
            //new System.Net.Http.DelegatingHandler[] { });
            //fo = new FaceOperations(_faceClient);
            _visionClient = new VisionAPI.VisionServiceClient(Properties.Settings.Default.VisionAPIKey, Properties.Settings.Default.VisionAPIHost);

            // How often to analyze.
            _grabber.TriggerAnalysisOnInterval(Properties.Settings.Default.AnalysisInterval);

            // Reset message.
            MessageArea.Text = "";

            // Record start time, for auto-stop
            _startTime = DateTime.Now;

            await _grabber.StartProcessingCameraAsync(CameraList.SelectedIndex);

            // Daniel CreateGroup called
            string            groupId   = "myFriends";
            CreateGroupPerson faceGroup = new CreateGroupPerson();
            string            writeBack = await faceGroup.FaceGroup(groupId);

            MessageArea.Text += $"Group: {groupId} created: extra messages {writeBack}";
        }
Example No. 10
        public static OcrResults AnalyzeText(Bitmap sourceImage)
        {
            Microsoft.ProjectOxford.Vision.VisionServiceClient visionServiceClient = ComputerVisionService.GetClient();

            using (MemoryStream memoryStream = new MemoryStream())
            {
                sourceImage.SaveAsPng(memoryStream);
                memoryStream.Position = 0;

                Console.WriteLine("Calling VisionServiceClient.RecognizeTextAsyncs()...");

                return(visionServiceClient.RecognizeTextAsync(memoryStream).GetAwaiter().GetResult());
            }
        }
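        // Hedged usage sketch (an assumption, not part of the original sample): load a bitmap
        // from disk, pass it to AnalyzeText above, and print every recognized line. The file
        // path and output format are illustrative only.
        public static void PrintRecognizedText(string imagePath)
        {
            using (Bitmap sourceImage = new Bitmap(imagePath))
            {
                OcrResults ocrResults = AnalyzeText(sourceImage);

                foreach (var region in ocrResults.Regions)
                {
                    foreach (var line in region.Lines)
                    {
                        // Join the words of each recognized line with spaces.
                        Console.WriteLine(string.Join(" ", line.Words.Select(w => w.Text)));
                    }
                }
            }
        }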
        private async void StartButton_Click(object sender, RoutedEventArgs e)
        {
            StopButton.Visibility  = Visibility.Visible;
            StartButton.Visibility = Visibility.Collapsed;

            // Create API clients.
            _faceClient   = new FaceAPI.FaceServiceClient(faceApiKey, faceApiHost);
            _visionClient = new VisionAPI.VisionServiceClient(visionApiKey, visionApiHost);

            // How often to analyze.
            _grabber.TriggerAnalysisOnInterval(TimeSpan.FromSeconds(measurementInterval));

            // What to do for analysis
            _grabber.AnalysisFunction = AnalysisFunction;

            // Reset message.
            //MessageArea.Text = "";

            // Record start time, for auto-stop
            _startTime = DateTime.Now;

            await _grabber.StartProcessingCameraAsync(0);
        }
        /// <summary>
        /// Image analysis used for the demo
        /// </summary>
        /// <param name="filePath"></param>
        /// <returns></returns>
        private AnalysisImageResult AnalysisImage(string filePath)
        {
            //Result object to return
            var AnalysisImageResult = new AnalysisImageResult();

            try
            {
                //Load the original file into a Bitmap for drawing
                var fs2 = new FileStream(filePath, FileMode.Open);
                System.Drawing.Bitmap bmp = new System.Drawing.Bitmap(fs2);
                Graphics g = Graphics.FromImage(bmp);
                fs2.Close();
                //Create the Vision client for analysis
                var visionClient = new Microsoft.ProjectOxford.Vision.VisionServiceClient(
                    ComputerVisionKey, endpoint);
                using (var fs = new FileStream(filePath, FileMode.Open))
                {
                    var Results = visionClient.AnalyzeImageAsync(
                        fs, new VisualFeature[] { VisualFeature.Faces, VisualFeature.Description }).Result;

                    int resizeFactor = 1;

                    int isM = 0, isF = 0;
                    //If faces were found, draw each one
                    foreach (var item in Results.Faces)
                    {
                        var faceRect = item.FaceRectangle;
                        //Draw a rectangle around the face
                        g.DrawRectangle(
                            new Pen(Brushes.Red, 10),
                            new Rectangle(
                                faceRect.Left * resizeFactor,
                                faceRect.Top * resizeFactor,
                                faceRect.Width * resizeFactor,
                                faceRect.Height * resizeFactor
                                ));
                        //Show the estimated age
                        var age = 0;
                        if (item.Gender.StartsWith("F"))
                        {
                            age = item.Age - 2;
                        }
                        else
                        {
                            age = item.Age;
                        }
                        g.DrawString(age.ToString(), new Font("Arial", 16),
                                     new SolidBrush(Color.Black),
                                     faceRect.Left * resizeFactor + 3, faceRect.Top * resizeFactor + 3);
                        //Count genders
                        if (item.Gender.StartsWith("M"))
                        {
                            isM += 1;
                        }
                        else
                        {
                            isF += 1;
                        }
                    }
                    //Show the analysis result

                    AnalysisImageResult.PictureDescription = Results.Description.Captions[0].Text;
                    //If faces were found, save and point to the annotated image
                    if (Results.Faces.Count() > 0)
                    {
                        AnalysisImageResult.FaceDescription = String.Format("找到{0}張臉, {1}男 {2}女", Results.Faces.Count(), isM, isF);
                        AnalysisImageResult.isFaceFound     = true;
                        var filename = Guid.NewGuid() + System.IO.Path.GetExtension(filePath);
                        bmp.Save(System.Web.HttpContext.Current.Request.MapPath("/_pic/" + filename));
                        AnalysisImageResult.NewImageURL = "/_pic/" + filename;
                    }
                    return(AnalysisImageResult);
                }
            }
            catch (Exception)
            {
                //Rethrow, preserving the original stack trace
                throw;
            }
        }
Example No. 13
        public IHttpActionResult POST()
        {
            try
            {
                //Set the ChannelAccessToken (or read it from Web.Config)
                this.ChannelAccessToken = channelAccessToken;
                //Get the Line event (this sample only takes the first one)
                var LineEvent          = this.ReceivedMessage.events.FirstOrDefault();
                isRock.LineBot.Bot bot = new isRock.LineBot.Bot(channelAccessToken);
                var UserInfo           = bot.GetUserInfo(LineEvent.source.userId);

                //Handle the Line verify call
                //
                if (LineEvent.replyToken == "00000000000000000000000000000000")
                {
                    return(Ok());
                }
                if (LineEvent.type == "postback")
                {
                    var data = LineEvent.postback.data;
                    var dt   = LineEvent.postback.Params.time;
                    this.ReplyMessage(LineEvent.replyToken, $"觸發了 postback \n 資料為:{data}\n 選擇結果:{dt} ");
                }
                if (LineEvent.type == "message")
                {
                    //Reply to the message
                    //if (LineEvent.message.type == "sticker") //a sticker was received
                    //    this.ReplyMessage(LineEvent.replyToken, 1, 2);
                    if (LineEvent.message.type == "location") //GPS
                    {
                        this.ReplyMessage(LineEvent.replyToken, $"你的位置在\n{LineEvent.message.latitude},{LineEvent.message.longitude}\n{LineEvent.message.address}");
                    }

                    if (LineEvent.message.type == "text")
                    {
                        if (LineEvent.message.text == "Hello")
                        {
                            this.ReplyMessage(LineEvent.replyToken, UserInfo.displayName + "您好,今天適合穿短袖上衣");
                        }
                    }

                    if (LineEvent.message.text == "餓了嗎")
                    {
                        var bott = new Bot(channelAccessToken);
                        //Build actions as the user-reply choices for the ConfirmTemplate
                        var actions = new List <isRock.LineBot.TemplateActionBase>();
                        actions.Add(new isRock.LineBot.MessageAction()
                        {
                            label = "Yes", text = "Yes"
                        });
                        actions.Add(new isRock.LineBot.MessageAction()
                        {
                            label = "No", text = "No"
                        });

                        var ConfirmTemplate = new isRock.LineBot.ConfirmTemplate()
                        {
                            text    = "請選擇其中之一",
                            altText = "請在手機上檢視",

                            actions = actions
                        };
                        bott.PushMessage(AdminUserId, ConfirmTemplate);
                    }
                    if (LineEvent.message.text == "Yes")
                    {
                        var bot1 = new Bot(channelAccessToken);
                        //Build actions as the user-reply choices for the ButtonsTemplate
                        var actions = new List <isRock.LineBot.TemplateActionBase>();
                        actions.Add(new isRock.LineBot.MessageAction()
                        {
                            label = "標題-文字回復", text = "回復文字"
                        });
                        actions.Add(new isRock.LineBot.UriAction()
                        {
                            label = "選擇餐廳", uri = new Uri("https://tgifridays.com.tw/locations")
                        });
                        // actions.Add(new isRock.LineBot.PostbackAction() { label = "標題-發生postback", data = "abc=aaa&def=111" });
                        actions.Add(new isRock.LineBot.DateTimePickerAction()
                        {
                            label = "請選擇時間", mode = "date"
                        });
                        var ButtonTemplateMsg = new isRock.LineBot.ButtonsTemplate()
                        {
                            title             = "選項",
                            text              = "請選擇其中之一",
                            altText           = "請在手機上檢視",
                            thumbnailImageUrl = new Uri("https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcSQSfptbc-INs9IUyaBi7xU3_Hr52NbdIEOwOa_gX5xrrQXEd0m7w"),
                            actions           = actions
                        };
                        bot1.PushMessage(AdminUserId, ButtonTemplateMsg);
                    }
                    if (LineEvent.message.type == "image")
                    {
                        //Get the uploaded image bytes
                        var bytes = this.GetUserUploadedContent(LineEvent.message.id);
                        //Save them as an image file
                        var guid     = Guid.NewGuid().ToString();
                        var filename = $"{guid}.png";
                        var path     = System.Web.Hosting.HostingEnvironment.MapPath("~/Temps/");

                        System.IO.File.WriteAllBytes(path + filename, bytes);

                        //Get the base URL
                        var baseUrl = Request.RequestUri.GetLeftPart(UriPartial.Authority);
                        //Compose a URL that is reachable from outside
                        var url = $"{baseUrl}/Temps/{filename}";
                        // this.ReplyMessage(LineEvent.replyToken, $"你的圖片位於\n{url}");
                        var fs1 = new FileStream(path + filename, FileMode.Open);
                        System.Drawing.Bitmap bmp = new System.Drawing.Bitmap(fs1);
                        Graphics g = Graphics.FromImage(bmp);
                        fs1.Close();

                        var visionClient = new Microsoft.ProjectOxford.Vision.VisionServiceClient(VisionAPIKey,
                                                                                                  "https://southeastasia.api.cognitive.microsoft.com/vision/v1.0");

                        //Analyze the image at the externally reachable URL
                        //Analyze Faces & Description

                        var Results = visionClient.AnalyzeImageAsync(url,
                                                                     new VisualFeature[] { VisualFeature.Faces, VisualFeature.Description }).Result;

                        int isM = 0, isF = 0;
                        foreach (var Face in Results.Faces)
                        {
                            //Get the face location
                            var faceRect = Face.FaceRectangle;
                            //Draw a red rectangle around the face
                            g.DrawRectangle(
                                new Pen(Brushes.Red, 10),
                                new Rectangle(faceRect.Left, faceRect.Top,
                                              faceRect.Width, faceRect.Height));

                            Font       drawFont   = new Font("Arial", 40);
                            SolidBrush drawBrush  = new SolidBrush(Color.Red);
                            String     drawString = Face.Age.ToString();

                            g.DrawString(drawString + "歲", drawFont, drawBrush, new Point(faceRect.Left - 30, faceRect.Top - 50));

                            //Count males and females
                            if (Face.Gender.StartsWith("M"))
                            {
                                isM += 1;
                            }
                            else
                            {
                                isF += 1;
                            }
                        }
                        var path1       = System.Web.Hosting.HostingEnvironment.MapPath("~/Temps/");
                        var newfilename = Guid.NewGuid().ToString() + ".png";
                        bmp.Save(path1 + newfilename);

                        this.ReplyMessage(LineEvent.replyToken, new Uri($"{baseUrl}/Temps/{newfilename}"));
                    }
                }

                return(Ok());

                //this.ReplyMessage(LineEvent.replyToken, "Hello,你的UserId是:" + LineEvent.source.userId);
            }
            catch (Exception ex)
            {
                //If an error occurs, push a message to the Admin
                this.PushMessage(AdminUserId, "發生錯誤:\n" + ex.Message);
                //response OK
                return(Ok());
            }
        }
Example No. 14
        protected async void Button1_Click(object sender, EventArgs e)
        {
            //If nothing was uploaded, stop here
            if (!this.FileUpload1.HasFile)
            {
                return;
            }

            //First, save the user's uploaded file
            string filename = Guid.NewGuid() + System.IO.Path.GetExtension(this.FileUpload1.PostedFile.FileName);
            string filePath = MapPath("pic/" + filename);

            //Save the file
            this.FileUpload1.PostedFile.SaveAs(filePath);
            //Display the image
            this.Image1.ImageUrl = "pic/" + filename;

            //Load the image into a Bitmap for drawing and create a Graphics object from it
            var fs1 = new FileStream(filePath, FileMode.Open);

            System.Drawing.Bitmap bmp = new System.Drawing.Bitmap(fs1);
            Graphics g = Graphics.FromImage(bmp);

            fs1.Close();

            //Use the Computer Vision API
            var visionClient = new Microsoft.ProjectOxford.Vision.VisionServiceClient(VisionAPIKey, "https://southeastasia.api.cognitive.microsoft.com/vision/v1.0");

            //Analyze the image (taken from FileUpload1.PostedFile.InputStream)
            //Analyze Faces & Description
            var Results = await visionClient.AnalyzeImageAsync(this.FileUpload1.PostedFile.InputStream,
                                                               new VisualFeature[] { VisualFeature.Faces, VisualFeature.Description });

            int isM = 0, isF = 0;

            //For every face that was found
            foreach (var Face in Results.Faces)
            {
                //Get the face location
                var faceRect = Face.FaceRectangle;
                //Draw a red rectangle around the face
                g.DrawRectangle(
                    new Pen(Brushes.Red, 10),
                    new Rectangle(faceRect.Left, faceRect.Top,
                                  faceRect.Width, faceRect.Height));
                //Count males and females
                if (Face.Gender.StartsWith("M"))
                {
                    isM += 1;
                }
                else
                {
                    isF += 1;
                }

                //Show the estimated age
                Font       drawFont  = new Font("Arial", 20);
                SolidBrush drawBrush = new SolidBrush(Color.Red);
                PointF     drawPoint = new PointF(faceRect.Left, faceRect.Top - 25);
                g.DrawString(Face.Age.ToString() + "歲", drawFont, drawBrush, drawPoint);
            }

            //Show the image information
            this.Label1.Text = String.Format("Face : 找到{0}張臉, {1}男 {2}女", Results.Faces.Count(), isM, isF);
            this.Label2.Text = "說明 : " + Results.Description.Captions[0].Text;

            //If faces were found, show the image with red rectangles
            if (Results.Faces.Count() > 0)
            {
                //Save under a new file name
                filename = Guid.NewGuid() + System.IO.Path.GetExtension(FileUpload1.PostedFile.FileName);
                bmp.Save(MapPath("pic/" + filename));
                //Display the new file
                this.Image1.ImageUrl = "pic/" + filename;
            }
        }
 public VisionHandler(string Key)
 {
     Client = new Microsoft.ProjectOxford.Vision.VisionServiceClient(Key);
 }
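 // Hedged usage sketch (an assumption, not part of the original sample): a helper that could sit
 // inside VisionHandler and use the Client created above to caption an image stream.
 public async Task <string> DescribeAsync(System.IO.Stream image)
 {
     // VisualFeature.Description asks the Computer Vision service for captions.
     var result = await Client.AnalyzeImageAsync(
         image, new[] { Microsoft.ProjectOxford.Vision.VisualFeature.Description });

     // Return the first caption, if the service produced any.
     return result.Description.Captions.Length > 0 ? result.Description.Captions[0].Text : null;
 }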
        private void Button2_Click(object sender, EventArgs e)
        {
            OpenFileDialog d = new OpenFileDialog();

            if (d.ShowDialog() != DialogResult.OK)
            {
                return;
            }

            string filePath = d.FileName;

            //Load the original file into a Bitmap
            var fs2 = new FileStream(filePath, FileMode.Open);

            System.Drawing.Bitmap bmp = new System.Drawing.Bitmap(fs2);
            Graphics g = Graphics.FromImage(bmp);

            fs2.Close();

            //Analysis result container
            AnalysisResult AnalysisResult = default(AnalysisResult);

            //Create the VisionServiceClient
            var visionClient = new Microsoft.ProjectOxford.Vision.VisionServiceClient(
                "<replace with your key>",
                "https://southeastasia.api.cognitive.microsoft.com/vision/v1.0");

            using (var fs = new FileStream(filePath, FileMode.Open))
            {
                //  this.textBox.Text = "Recognizing...";
                //Request face and description analysis
                //  this.textBox.Text = "";
                var t = new List <VisualFeature>();
                t.Add(VisualFeature.Faces);
                t.Add(VisualFeature.Description);
                AnalysisResult = visionClient.AnalyzeImageAsync(fs, t).Result;
            }

            int isM          = 0;
            int isF          = 0;
            int resizeFactor = 1;

            string result = "";

            //Process each face that was found
            foreach (var item in AnalysisResult.Faces)
            {
                var faceRect = item.FaceRectangle;

                //Draw a rectangle around the face
                g.DrawRectangle(
                    new Pen(Brushes.Red, 10),
                    new System.Drawing.Rectangle((int)faceRect.Left * resizeFactor,
                                                 (int)faceRect.Top * resizeFactor, (int)faceRect.Width * resizeFactor, (int)faceRect.Height * resizeFactor));
                //Show the estimated age
                var age = 0;
                if (item.Gender.StartsWith("F"))
                {
                    age = item.Age - 2;
                }
                else
                {
                    age = item.Age;
                }
                g.DrawString(age.ToString(), new Font("Arial", 16), new SolidBrush(System.Drawing.Color.Black), faceRect.Left * resizeFactor + 3, faceRect.Top * resizeFactor + 3);
                //Count genders
                if (item.Gender.StartsWith("M"))
                {
                    isM += 1;
                }
                else
                {
                    isF += 1;
                }
            }

            var PictureDescription = AnalysisResult.Description.Captions[0].Text;

            //If faces were found, save and show the annotated image
            if (AnalysisResult.Faces.Count() > 0)
            {
                PictureDescription += String.Format("找到{0}張臉, {1}男 {2}女", AnalysisResult.Faces.Count(), isM, isF);
                string filename = Guid.NewGuid().ToString() + System.IO.Path.GetExtension(filePath);
                bmp.Save(filename);
                //load picture
                this.PictureBox1.Image = Image.FromFile(filename);
            }

            this.TextBox1.Text = PictureDescription;
        }
Example No. 17
        private void InitializeClientAPIs()
        {
            // Create API clients.
            _faceClient   = new FaceAPI.FaceServiceClient(FACE_API_KEY, FACE_API_ROOT);
            _visionClient = new VisionAPI.VisionServiceClient(VISION_API_KEY, VISION_API_ROOT);

            _grabber.NewFrameProvided += (s, e) =>
            {
                this.Dispatcher.BeginInvoke((Action)(() =>
                {
                    // Display the image in the left pane captured from camera.
                    LeftImage.Source = e.Frame.Image.ToBitmapSource();
                }));

                // See if auto-stop should be triggered.
                if (Properties.Settings.Default.AutoStopEnabled && (DateTime.Now - _startTime) > Properties.Settings.Default.AutoStopTime)
                {
                    _grabber.StopProcessingAsync();
                }
            };

            // Set up a listener for when the client receives a new result from an API call.
            _grabber.NewResultAvailable += (s, e) =>
            {
                this.Dispatcher.BeginInvoke((Action)(() =>
                {
                    if (e.TimedOut)
                    {
                        MessageArea.Text = "API call timed out.";
                    }
                    else if (e.Exception != null)
                    {
                        string apiName = "";
                        string message = e.Exception.Message;
                        var faceEx = e.Exception as FaceAPI.FaceAPIException;
                        var emotionEx = e.Exception as Common.ClientException;
                        var visionEx = e.Exception as VisionAPI.ClientException;
                        if (faceEx != null)
                        {
                            apiName = "Face";
                            message = faceEx.ErrorMessage;
                        }
                        else if (emotionEx != null)
                        {
                            apiName = "Emotion";
                            message = emotionEx.Error.Message;
                        }
                        else if (visionEx != null)
                        {
                            apiName = "Computer Vision";
                            message = visionEx.Error.Message;
                        }
                        MessageArea.Text = string.Format("{0} API call failed on frame {1}. Exception: {2}", apiName, e.Frame.Metadata.Index, message);
                    }
                    else
                    {
                        if (_mode == AppMode.Text)
                        {
                            if (e.AnalysisResult != null)
                            {
                                foreach (var lr in e.AnalysisResult.Regions)
                                {
                                    if (lr.Lines.Any())
                                    {
                                        StringBuilder builder = new StringBuilder();
                                        foreach (var line in lr.Lines)
                                        {
                                            builder.AppendLine(string.Join(" ", line.Words.Select(w => w.Text).ToArray()));
                                        }

                                        this.Dispatcher.Invoke(() =>
                                        {
                                            ResultList.Items.Add($"Text detected: {builder.ToString()}");
                                        });
                                    }
                                }
                            }
                        }
                        else if (_mode == AppMode.Emotions)
                        {
                            if (e.AnalysisResult != null)
                            {
                                var faces = e.AnalysisResult.EmotionFaces;
                                this.Dispatcher.Invoke(() =>
                                {
                                    foreach (var face in faces)
                                    {
                                        var bestEmotion = face.FaceAttributes.Emotion.ToRankedList().Select(kv => new Tuple <string, float>(kv.Key, kv.Value)).First();
                                        var displayText = string.Format("{0}: {1:N1}", bestEmotion.Item1, bestEmotion.Item2);
                                        ResultList.Items.Add($"Emotion '{displayText}' is detected on face {face.FaceId}.");
                                    }
                                });
                            }
                        }
                        else if (_mode == AppMode.Faces)
                        {
                            if (e.AnalysisResult != null)
                            {
                                var result = e.AnalysisResult.FaceIdentifyResult;
                                this.Dispatcher.Invoke((Action)(() =>
                                {
                                    foreach (var sItem in result)
                                    {
                                        ResultList.Items.Add(sItem);
                                    }
                                }));
                            }
                        }
                    }
                }));
            };

            // Create local face detector.
            _localFaceDetector.Load("Data/haarcascade_frontalface_alt2.xml");
        }
Example No. 18
        private void button2_Click(object sender, EventArgs e)
        {
            var msg = "";

            //Read the image file into two memory streams
            byte[] file       = System.IO.File.ReadAllBytes(this.textBox1.Text);
            Stream MemStream1 = new MemoryStream(file);
            Stream MemStream2 = new MemoryStream(file);

            //For drawing on the image
            System.Drawing.Bitmap bmp = new System.Drawing.Bitmap(MemStream2);
            Graphics g = Graphics.FromImage(bmp);
            //ComputerVision instance
            var visionClient = new Microsoft.ProjectOxford.Vision.VisionServiceClient(
                ComputerVisionServiceKey, ComputerVisionServiceEndpoint);

            //For analysis
            using (MemStream1)
            {
                //Analyze the image
                var Results = visionClient.AnalyzeImageAsync(
                    MemStream1, new VisualFeature[] { VisualFeature.Faces, VisualFeature.Adult }).Result;
                //Counters for each gender
                int isM = 0, isF = 0;
                //If faces were found, mark each one with a rectangle
                foreach (var item in Results.Faces)
                {
                    var faceRect = item.FaceRectangle;
                    //Draw the rectangle
                    g.DrawRectangle(
                        new Pen(Brushes.Red, 3),
                        new System.Drawing.Rectangle(faceRect.Left, faceRect.Top,
                                                     faceRect.Width, faceRect.Height));
                    //Show the age next to the rectangle
                    var age = 0;
                    if (item.Gender.StartsWith("F"))
                    {
                        age = item.Age - 2;
                    }
                    else
                    {
                        age = item.Age;
                    }
                    //Draw the number
                    g.DrawString(age.ToString(), new Font(SystemFonts.DefaultFont.FontFamily, 30, FontStyle.Bold),
                                 new SolidBrush(System.Drawing.Color.Blue),
                                 faceRect.Left + 3, faceRect.Top + 3);
                    //Count genders
                    if (item.Gender.StartsWith("M"))
                    {
                        isM += 1;
                    }
                    else
                    {
                        isF += 1;
                    }
                }
                //Image analysis result
                if (Results.Description != null)
                {
                    msg += $"\n\r圖片說明:\n\r{Results.Description.Captions[0].Text}";
                }

                //If faces were found, report how many
                if (Results.Faces.Count() > 0)
                {
                    msg += String.Format("\n\r找到{0}張臉, \n\r{1}男 {2}女", Results.Faces.Count(), isM, isF);
                }
            }
            this.textBox2.Text     = msg;
            this.pictureBox2.Image = bmp;
        }
        /// <summary>
        /// Process an uploaded photo
        /// </summary>
        /// <param name="LineEvent"></param>
        /// <param name="token"></param>
        /// <returns></returns>
        private List <isRock.LineBot.MessageBase> ProcessImage(isRock.LineBot.Event LineEvent, string token)
        {
            //web.config
            var    ComputerVisionServiceKey      = System.Configuration.ConfigurationManager.AppSettings["ComputerVisionServiceKey"];
            var    ComputerVisionServiceEndpoint = System.Configuration.ConfigurationManager.AppSettings["ComputerVisionServiceEndpoint"];
            string Msg = "";

            //Get the photo
            //Get the bytes of the user-uploaded image from the LineEvent
            var byteArray = isRock.LineBot.Utility.GetUserUploadedContent(LineEvent.message.id, token);
            //Wrap the bytes in two streams, one for drawing and one for analysis
            Stream MemStream1 = new MemoryStream(byteArray);
            Stream MemStream2 = new MemoryStream(byteArray);

            //For drawing on the image
            System.Drawing.Bitmap bmp = new System.Drawing.Bitmap(MemStream1);
            Graphics g = Graphics.FromImage(bmp);
            //ComputerVision instance
            var visionClient = new Microsoft.ProjectOxford.Vision.VisionServiceClient(
                ComputerVisionServiceKey, ComputerVisionServiceEndpoint);

            //For analysis
            using (MemStream2)
            {
                //Analyze the image
                var Results = visionClient.AnalyzeImageAsync(
                    MemStream2, new VisualFeature[] { VisualFeature.Faces, VisualFeature.Description }).Result;
                //Counters for each gender
                int isM = 0, isF = 0;
                //If faces were found, mark each one with a rectangle
                foreach (var item in Results.Faces)
                {
                    var faceRect = item.FaceRectangle;
                    //Draw the rectangle
                    g.DrawRectangle(
                        new Pen(Brushes.Red, 3),
                        new Rectangle(faceRect.Left, faceRect.Top,
                                      faceRect.Width, faceRect.Height));
                    //Show the age next to the rectangle
                    var age = 0;
                    if (item.Gender.StartsWith("F"))
                    {
                        age = item.Age - 2;
                    }
                    else
                    {
                        age = item.Age;
                    }
                    //Draw the number
                    g.DrawString(age.ToString(), new Font(SystemFonts.DefaultFont.FontFamily, 24, FontStyle.Bold),
                                 new SolidBrush(Color.Black),
                                 faceRect.Left + 3, faceRect.Top + 3);
                    //Count genders
                    if (item.Gender.StartsWith("M"))
                    {
                        isM += 1;
                    }
                    else
                    {
                        isF += 1;
                    }
                }
                //Image analysis result
                Msg += $"\n圖片說明:\n{Results.Description.Captions[0].Text}";

                //If faces were found, report how many
                if (Results.Faces.Count() > 0)
                {
                    Msg += String.Format("\n找到{0}張臉, \n{1}男 {2}女", Results.Faces.Count(), isM, isF);
                }
            }

            string ImgurURL = "";

            using (MemoryStream m = new MemoryStream())
            {
                bmp.Save(m, System.Drawing.Imaging.ImageFormat.Png);
                ImgurURL = UploadImage2Imgur(m.ToArray());
            }

            //After a successful upload, image.Link returns a URL
            //Build the text message
            isRock.LineBot.TextMessage TextMsg = new isRock.LineBot.TextMessage(Msg);
            //Build the image message (using the uploaded URL)
            isRock.LineBot.ImageMessage imageMsg = new isRock.LineBot.ImageMessage(new Uri(ImgurURL), new Uri(ImgurURL));
            //Build the message collection
            var Messages = new List <isRock.LineBot.MessageBase>();

            Messages.Add(TextMsg);
            Messages.Add(imageMsg);

            //Reply with all of the messages in the collection at once
            return(Messages);
        }