コード例 #1
0
ファイル: MainWindow.xaml.cs プロジェクト: sclshu3714/INKT
        private string[] Regconize()         // REM recognize the points in ptinfos (NOTE: "Regconize" typo kept — callers use this name)
        {
            // Convert the collected point infos into GDI+ points for the Ink API.
            List <System.Drawing.Point> listpts = new List <System.Drawing.Point>();

            foreach (var i in ptinfos)
            {
                listpts.Add(i.ToDrawingPoint());
            }
            // REM turn the whole point set into one stroke
            Stroke stroke = ink.CreateStroke(listpts.ToArray());

            // REM add the stroke to the recognizer context
            recognizectx.Strokes.Add(stroke);

            // `out` parameters need no pre-initialization; the recognizer fills it in.
            RecognitionStatus recognizestatus;
            RecognitionResult recognizeresult = recognizectx.Recognize(out recognizestatus);

            List <string> result = new List <string>();

            // Guard: on a failed recognition the result can be null / the status
            // non-NoError — previously this fell through into a NullReferenceException.
            if (recognizestatus != RecognitionStatus.NoError || recognizeresult == null)
            {
                return(result.ToArray());
            }

            // REM list every alternate the recognizer produced
            RecognitionAlternates recognizealternates = recognizeresult.GetAlternatesFromSelection();
            for (var i = 0; i < recognizealternates.Count; i++)
            {
                result.Add(recognizealternates[i].ToString());
            }
            return(result.ToArray());
        }
コード例 #2
0
        /// <summary>
        /// Picks a new random target gesture, renders it and clears the drawing area.
        /// Does nothing (beyond logging) when the gesture library is empty.
        /// </summary>
        public void NextGesture()
        {
            // Guard: the original logged the error but still called
            // PickAnotherRandomGesture() on an empty library — bail out instead.
            if (gestures.Count == 0)
            {
                Debug.LogError("not enough gestures in library");
                return;
            }

            PickAnotherRandomGesture();

            // Reset round state: await a new drawing for the freshly rendered gesture.
            recognitionStatus = RecognitionStatus.Await;
            gestureRenderer.RenderGesture(curGesture);
            drawingBoard.CleanDrawingArea();
        }
コード例 #3
0
 /// <summary>
 /// Creates a recognition result text tied to the given speech audio.
 /// </summary>
 /// <param name="speech">The audio this text was recognized from.</param>
 /// <param name="text">The recognized text.</param>
 /// <param name="status">Outcome status of the recognition run.</param>
 /// <param name="confidence">Recognizer confidence for <paramref name="text"/>.</param>
 public ResultText(
     Speech speech,
     string text,
     RecognitionStatus status,
     decimal confidence)
 {
     SetAudio(speech);
     SetText(text);
     // Date starts at DateTime's default value — presumably stamped later; TODO confirm.
     SetDate(default(DateTime));
     SetConfidence(confidence);
     RecognitionStatus = status;
 }
コード例 #4
0
        /// <summary>
        /// Downloads an .oga audio file, converts it to WAV and runs it through the
        /// Bing Speech recognition service.
        /// </summary>
        /// <param name="contentUrl">URL of the .oga audio to recognize.</param>
        /// <returns>The recognized text on success; otherwise an error message containing the status.</returns>
        public static async Task <string> ReconhecerFala(string contentUrl)
        {
            RecognitionStatus status       = RecognitionStatus.None;
            string            stringResult = string.Empty;
            OgaToWavConverter converter    = new OgaToWavConverter();

            byte[] ogaData;
            // WebClient is IDisposable — the original leaked it; dispose after the download.
            using (WebClient wc = new WebClient())
            {
                ogaData = await wc.DownloadDataTaskAsync(contentUrl);
            }

            var wavData = converter.Convert(ogaData);

            var preferences = new Preferences("pt-BR",
                                              new Uri(@"wss://speech.platform.bing.com/api/service/recognition"),
                                              new CognitiveServicesAuthorizationProvider(CognitiveServicesAuthorizationProvider.API_KEY));

            using (var speechClient = new SpeechClient(preferences))
            {
                // Partial results carry the text recognized so far.
                speechClient.SubscribeToPartialResult(
                    result =>
                {
                    stringResult = result.DisplayText;
                    return(Task.FromResult(true));
                });

                // The final result carries the overall recognition status.
                speechClient.SubscribeToRecognitionResult(
                    result =>
                {
                    status = result.RecognitionStatus;
                    return(Task.FromResult(true));
                });

                var deviceMetadata      = new DeviceMetadata(DeviceType.Near, DeviceFamily.Unknown, NetworkType.Unknown, OsName.Windows, "10", "IBM", "ThinkCenter");
                var applicationMetadata = new ApplicationMetadata("WorkshopAtentoBot", "1.0");
                var requestMetada       = new RequestMetadata(Guid.NewGuid(), deviceMetadata, applicationMetadata, "ReconhecimentoFalaService");
                await speechClient.RecognizeAsync(new SpeechInput(new MemoryStream(wavData), requestMetada), CancellationToken.None).ConfigureAwait(false);
            }

            // The status callback normally fires before RecognizeAsync completes; poll
            // for a bounded time (~30 s) instead of the original unbounded loop, which
            // could spin forever if the callback never arrived.
            for (var attempts = 0; status == RecognitionStatus.None && attempts < 150; attempts++)
            {
                await Task.Delay(200);
            }

            if (status == RecognitionStatus.Success)
            {
                return(stringResult);
            }

            return($"Ocorreu um erro no reconhecimento de fala. Status = {status}");
        }
コード例 #5
0
        /// <summary>
        /// Runs handwriting recognition on a serialized stroke string.
        /// </summary>
        /// <param name="strokesStr">Comma-separated x,y coordinates; strokes are delimited by ",eb,".</param>
        /// <param name="count">Maximum number of recognition alternates to return.</param>
        /// <returns>Up to <paramref name="count"/> alternates joined by single spaces; empty string on failure.</returns>
        public String Recognizer(String strokesStr, int count)
        {
            List <List <int[]> > strokes = new List <List <int[]> >();
            var array = Regex.Split(strokesStr, ",eb,");

            foreach (var item in array)
            {
                var stroke = new List <int[]>();
                var coords = item.Split(',');
                // Coordinates come in x,y pairs; the "- 1" bound guards against an odd
                // trailing coordinate (previously an IndexOutOfRangeException at i + 1).
                for (var i = 0; i < coords.Length - 1; i += 2)
                {
                    stroke.Add(new[] { int.Parse(coords[i]), int.Parse(coords[i + 1]) });
                }
                strokes.Add(stroke);
            }

            Ink ink = new Ink();

            // RecognizerContext wraps a native recognizer — dispose it (the original leaked it).
            using (RecognizerContext recognizerContext = new Recognizers().GetDefaultRecognizer().CreateRecognizerContext())
            {
                recognizerContext.Strokes = ink.CreateStrokes();
                foreach (List <int[]> stroke in strokes)
                {
                    Point[] points = new Point[stroke.Count];
                    for (int i = 0; i < stroke.Count; i++)
                    {
                        points[i] = new Point(stroke[i][0], stroke[i][1]);
                    }
                    recognizerContext.Strokes.Add(ink.CreateStroke(points));
                }

                RecognitionStatus recognitionStatus;
                RecognitionResult recognitionResult = recognizerContext.Recognize(out recognitionStatus);

                // Guard against a failed recognition before touching the result.
                if (recognitionStatus != RecognitionStatus.NoError || recognitionResult == null)
                {
                    return(string.Empty);
                }

                // Join with single spaces — same output as the previous
                // concatenate-then-Trim approach, without quadratic string building.
                var alternates = new List <string>();
                RecognitionAlternates alts = recognitionResult.GetAlternatesFromSelection();
                for (int i = 0; i < alts.Count && i < count; i++)
                {
                    alternates.Add(alts[i].ToString());
                }
                return(string.Join(" ", alternates));
            }
        }
コード例 #6
0
        /// <summary>
        /// Classifies the user's drawing against the current target gesture, sets the
        /// recognition status accordingly and reports the match score on screen.
        /// </summary>
        public void CompareShapes()
        {
            var drawn = new Gesture(drawingBoard.DrawingPoints.ToArray());
            var match = PointCloudRecognizer.Classify(drawn, new[] { curGesture });

            var scorePercent = (int)(match.Score * 100);
            string report = "Shapes match on " + scorePercent + " %." + Environment.NewLine;

            if (match.Score < RecognitionThreshold)
            {
                // Below threshold: mark as failed and let the user try again.
                recognitionStatus = RecognitionStatus.Fail;
                report += "Threshold is " + (int)(RecognitionThreshold * 100) + " %. Try again";
                drawingBoard.CleanDrawingArea();
            }
            else
            {
                // Score meets the threshold: the gesture is accepted.
                recognitionStatus = RecognitionStatus.Recognized;
                report += "You got it.";
                GestureRecognized();
            }

            statusText.text = report;
        }
コード例 #7
0
        /// <summary>
        /// Creates a <see cref="ResultText"/> for the given speech, attaches it to the
        /// aggregate and persists it.
        /// </summary>
        /// <param name="speech">The speech the text was recognized from.</param>
        /// <param name="text">The recognized text.</param>
        /// <param name="status">Outcome status of the recognition run.</param>
        /// <param name="confidence">Recognizer confidence for <paramref name="text"/>.</param>
        /// <returns>The newly created and persisted result.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="speech"/> or <paramref name="text"/> is null.</exception>
        /// <exception cref="ArgumentException"><paramref name="text"/> is empty or whitespace.</exception>
        public ResultText AddRecognitionResult(
            Speech speech,
            string text,
            RecognitionStatus status,
            decimal confidence)
        {
            if (speech == null)
            {
                throw new ArgumentNullException(nameof(speech));
            }
            if (text == null)
            {
                throw new ArgumentNullException(nameof(text));
            }
            if (string.IsNullOrWhiteSpace(text))
            {
                // A present-but-blank argument is an ArgumentException, not a null
                // one (CA2208) — the original threw ArgumentNullException here.
                throw new ArgumentException("Text must not be empty or whitespace.", nameof(text));
            }

            ResultText resultText = new ResultText(speech, text, status, confidence);

            // Keep the aggregate and the repository in sync.
            speech.AddRecognitionResult(resultText);

            _recognitionResultsRepository.Add(resultText);

            return(resultText);
        }
        /// <summary>
        /// Runs handwriting recognition on the strokes currently selected in the ink
        /// canvas and shows the top result in <c>textBlock</c>.
        /// </summary>
        private void InkCanvas_SelectionChanged(object sender, EventArgs e)
        {
            var selectedStrokes = InkCanvas.GetSelectedStrokes();

            using (MemoryStream ms = new MemoryStream())
            {
                // Round-trip the WPF strokes through ISF bytes so the legacy
                // Microsoft.Ink recognizer can load them.
                selectedStrokes.Save(ms);
                var ink = new Ink();
                ink.Load(ms.ToArray());

                using (RecognizerContext myRecoContext = new RecognizerContext())
                {
                    RecognitionStatus status = RecognitionStatus.ProcessFailed;
                    myRecoContext.Strokes = ink.Strokes;
                    try
                    {
                        var recoResult = myRecoContext.Recognize(out status);

                        if (status == RecognitionStatus.NoError)
                        {
                            textBlock.Text = recoResult.TopString;
                            //InkCanvas.Strokes.Clear();
                        }
                        else
                        {
                            MessageBox.Show("ERROR: " + status.ToString());
                        }
                    }
                    catch (Exception ex)
                    {
                        // Recognize throws e.g. when no recognizer is installed or the
                        // selection is empty — surface it instead of the original
                        // silent, empty catch block.
                        MessageBox.Show("ERROR: " + ex.Message);
                    }
                }
            }
        }
コード例 #9
0
 /// <summary>
 /// Maps a <see cref="RecognitionStatus"/> onto its DTO representation.
 /// A null <paramref name="status"/> yields a DTO with all-null properties.
 /// </summary>
 public RecognitionStatusDto(RecognitionStatus status)
 {
     // Null-conditional access propagates a missing status as null fields.
     RecognitionStatusId    = status?.StatusId;
     RecognitionStatusName  = status?.StatusName;
     RecognitionStatusAlias = status?.StatusAlias;
 }
コード例 #10
0
 // P/Invoke into a native speech SDK (QISR* names suggest the iFLYTEK MSC
 // library — TODO confirm; the [DllImport] attribute is not visible here).
 // Fetches the recognition result for the given session; rsltStatus and
 // errorCode are filled in by the native call.
 private static extern IntPtr QISRGetResult(string sessionID, ref RecognitionStatus rsltStatus, int waitTime, ref int errorCode);
コード例 #11
0
 // P/Invoke into a native speech SDK (QISR* names suggest the iFLYTEK MSC
 // library — TODO confirm; the [DllImport] attribute is not visible here).
 // Streams a chunk of audio into the session; epStatus and recogStatus are
 // filled in by the native call. Returns a native error code.
 private static extern int QISRAudioWrite(string sessionID, byte[] waveData, uint waveLen, SampleStatus audioStatus, ref EndpointStatus epStatus, ref RecognitionStatus recogStatus);