/// <summary>
/// Initializes the main page: wires up the XAML tree, starts in the idle
/// recognition state, and binds the view model resolved from the locator.
/// </summary>
public MainPage()
{
    InitializeComponent();
    _currentState = RecognitionMode.Idle;
    DataContext = Locator.Instance.Resolve<MainWindowViewModel>();
}
/// <summary>
/// Builds the chunked POST request for the speech endpoint, including the
/// recognition mode/API version in the path and language/format/profanity
/// in the query string, plus bearer auth and Accept headers.
/// </summary>
/// <param name="outputMode">Desired response format (lower-cased into the query).</param>
/// <returns>The configured <see cref="HttpRequestMessage"/>.</returns>
HttpRequestMessage CreateRequest(OutputMode outputMode)
{
    try
    {
        var uriBuilder = new UriBuilder(SpeechEndpoint.Protocol, SpeechEndpoint.Host, SpeechEndpoint.Port, SpeechEndpoint.Path);
        // BUG FIX: ToLower() is culture-sensitive (e.g. Turkish dotless-I) and can
        // corrupt URL segments; use ToLowerInvariant() for protocol tokens.
        uriBuilder.Path += $"/{RecognitionMode.ToString().ToLowerInvariant()}/cognitiveservices/{ApiVersion}";
        uriBuilder.Query = $"language={RecognitionLanguage}&format={outputMode.ToString().ToLowerInvariant()}&profanity={ProfanityMode.ToString().ToLowerInvariant()}";
        Debug.WriteLine($"{DateTime.Now} :: Request Uri: {uriBuilder.Uri}");

        var request = new HttpRequestMessage(HttpMethod.Post, uriBuilder.Uri);
        // Audio is streamed, so the body length is unknown up front.
        request.Headers.TransferEncodingChunked = true;
        request.Headers.ExpectContinue = true;
        request.Headers.Authorization = new AuthenticationHeaderValue(Constants.Keys.Bearer, AuthClient.Token);
        request.Headers.Accept.ParseAdd(Constants.MimeTypes.Json);
        request.Headers.Accept.ParseAdd(Constants.MimeTypes.Xml);
        request.Headers.Host = SpeechEndpoint.Host;
        return request;
    }
    catch (Exception ex)
    {
        Debug.WriteLine(ex.Message);
        throw; // preserve the original stack trace
    }
}
/// <summary>
/// Checks the image limits for the given recognize mode.
/// </summary>
/// <param name="imageStream">Image file stream, positioned at the start of the image data.</param>
/// <param name="mode">Recognize mode.</param>
/// <returns>True if the image fits the size/dimension limits for the mode.</returns>
public bool checkImageLimits(FileStream imageStream, RecognitionMode mode)
{
    // BUG FIX: Image is IDisposable and was leaked; dispose the decoded image.
    using (Image image = Image.FromStream(imageStream))
    {
        double imageSurface = (double)(image.Height * image.Width) / 1000000.0; // megapixels
        double fileSize = imageStream.Length / 1000.0;                          // kilobytes

        if (mode == RecognitionMode.single)
        {
            if (fileSize > SINGLEIR_MAX_FILE_SIZE ||
                image.Height < SINGLEIR_MIN_DIMENSION ||
                image.Width < SINGLEIR_MIN_DIMENSION ||
                imageSurface < SINGLEIR_MIN_IMAGE_SURFACE ||
                imageSurface > SINGLEIR_MAX_IMAGE_SURFACE)
            {
                return false;
            }
        }
        else if (mode == RecognitionMode.multi)
        {
            if (fileSize > MULTIPLEIR_MAX_FILE_SIZE ||
                image.Height < MULTIPLEIR_MIN_DIMENSION ||
                image.Width < MULTIPLEIR_MIN_DIMENSION ||
                imageSurface < MULTIPLEIR_MIN_IMAGE_SURFACE ||
                imageSurface > MULTIPLEIR_MAX_IMAGE_SURFACE)
            {
                return false;
            }
        }
        return true;
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="PikkartAR.RecognitionOptions"/> class.
/// </summary>
/// <param name="storage">Recognition storage mode.</param>
/// <param name="mode">Recognition mode.</param>
/// <param name="cloudAuthInfo">Cloud recognition authentication info.</param>
public RecognitionOptions(RecognitionStorage storage, RecognitionMode mode, CloudRecognitionInfo cloudAuthInfo)
{
    _storage = storage;
    _mode = mode;
    _cloudInfo = cloudAuthInfo;
}
/// <summary>
/// Switches the active speech recognition mode and notifies listeners.
/// When recognition is disabled, swaps the idle prompt for a hint to pick
/// a mode; any other on-screen text is left untouched.
/// </summary>
public void SelectMode(RecognitionMode speechRecognitionModeToSet)
{
    speechRecognitionMode = speechRecognitionModeToSet;
    onSelectRecognitionMode(speechRecognitionMode);

    if (speechRecognitionMode != RecognitionMode.Disabled)
    {
        return;
    }

    bool showingIdlePrompt = outputText.text == "Say something..." || outputText.text == "";
    if (showingIdlePrompt)
    {
        outputText.text = "Select a mode to begin.";
    }
}
/// <summary>
/// Changes recognition mode
/// </summary>
/// <returns>Server response</returns>
public Dictionary<string, string> modeChange(RecognitionMode mode)
{
    // SOAP body fragment selecting multi- or single-object recognition.
    string modeElement = mode == RecognitionMode.multi
        ? "<mode xsi:type=\"xsd:string\">Multi</mode>"
        : "<mode xsi:type=\"xsd:string\">Single</mode>";

    return sendSoapRequest(modeElement, "modeChange");
}
// Responds to a recognition-mode selection: either starts recognizing or tears
// down the current recognizer and clears any recognized text.
// NOTE(review): the condition looks inverted relative to the sibling handlers
// (which call BeginRecognizing() when THEIR mode is selected, not when Disabled
// is selected) — confirm whether `== RecognitionMode.Disabled` is intentional.
public void HandleOnSelectRecognitionMode(RecognitionMode recognitionMode) { if (recognitionMode == RecognitionMode.Disabled) { BeginRecognizing(); } else { if (recognizer != null) { recognizer.StopContinuousRecognitionAsync(); } recognizer = null; recognizedString = ""; } }
public static readonly int MAX_PROPAGATION_DURATION = 3000000; //usec

// BUG FIX: these delegates used to be locals. Marshal.GetFunctionPointerForDelegate
// does NOT keep a delegate alive, so once the constructor returned the GC could
// collect them while native code still held the raw function pointers, crashing
// on the next callback. Rooting them in instance fields ties their lifetime to
// this Recognizer.
private WorkDispatchedListenerDelegate workDispatchedListenerKeepAlive_;
private RecognizedListenerDelegate recognizedListenerKeepAlive_;

/// <summary>
/// Creates the native recognizer and registers the managed callback
/// function pointers with the native proxy-listener table.
/// </summary>
/// <param name="smart">Owning Smart instance (its native handle is passed through).</param>
/// <param name="recogMode">Native recognition mode; defaults to target tracking.</param>
/// <param name="initMode">Scene-mapping initialization mode; defaults to target-based.</param>
public Recognizer(Smart smart, RecognitionMode recogMode = RecognitionMode.RECOGNITION_MODE_TARGET_TRACKING, SceneMappingInitMode initMode = SceneMappingInitMode.SCENE_MAPPING_INIT_MODE_TARGET)
{
    self_ = sarSmartar_SarRecognizer_SarRecognizer(smart.self_, recogMode, initMode);

    thisObj_ = this;

    workDispatchedListenerKeepAlive_ = new WorkDispatchedListenerDelegate(OnWorkDispatched);
    proxyListenerDelegates_.workDispatchedListenerDelegate_ = Marshal.GetFunctionPointerForDelegate(workDispatchedListenerKeepAlive_);

    recognizedListenerKeepAlive_ = new RecognizedListenerDelegate(OnRecognized);
    proxyListenerDelegates_.recognizedListenerDelegate_ = Marshal.GetFunctionPointerForDelegate(recognizedListenerKeepAlive_);

    sarSmartar_SarRecognizerProxyListeners_sarCreate(ref proxyListenerDelegates_, out proxyListeners_);
}
/// <summary>
/// Reacts to a recognition-mode selection: starts continuous speech
/// recognition when the speech mode is chosen, otherwise stops and drops
/// the current recognizer and clears any recognized text.
/// </summary>
public void HandleOnSelectRecognitionMode(RecognitionMode recognitionMode)
{
    Debug.Log("HandleOnSelectRecognition called");

    if (recognitionMode == RecognitionMode.Speech_Recognizer)
    {
        BeginRecognizing();
        return;
    }

    recognizer?.StopContinuousRecognitionAsync();
    recognizer = null;
    recognizedString = "";
}
/// <summary>
/// Reacts to a recognition-mode selection: starts intent recognition when
/// the intent mode is chosen, otherwise stops audio capture (if running)
/// and resets the recognized text and command flag.
/// </summary>
public void HandleOnSelectRecognitionMode(RecognitionMode recognitionMode)
{
    if (recognitionMode == RecognitionMode.Intent_Recognizer)
    {
        recognizedString = "Say something...";
        BeginRecognizing();
        return;
    }

    if (capturingAudio)
    {
        StopCapturingAudio();
    }
    recognizedString = "";
    commandCaptured = false;
}
/// <summary>
/// Reacts to a recognition-mode selection: starts continuous translation
/// when the translation mode is chosen, otherwise stops and drops the
/// translator and clears both the recognized and translated text.
/// </summary>
public void HandleOnSelectRecognitionMode(RecognitionMode recognitionMode)
{
    if (recognitionMode == RecognitionMode.Tralation_Recognizer)
    {
        recognizedString = "Say something...";
        translatedString = "";
        BeginTranslating();
        return;
    }

    translator?.StopContinuousRecognitionAsync();
    translator = null;
    recognizedString = "";
    translatedString = "";
}
/// <summary>
/// Sends an image recognition request using pre-read image bytes.
/// </summary>
/// <param name="byteArr">JPEG image payload.</param>
/// <param name="mode">Recognize mode (single- or multi-object).</param>
/// <param name="all">If true, requests all matches instead of the best one.</param>
/// <returns>Deserialized server response.</returns>
public recognitionResponse recognize1(byte[] byteArr, RecognitionMode mode, bool all)
{
    // BUG FIX: the mode/all parameters were silently ignored and the URL was
    // hard-coded to "multi/all/". Build the URL from the arguments, matching
    // the file-based recognize(string, RecognitionMode, bool) overload.
    string url = "http://recognize.im/v2/recognize/";
    if (mode == RecognitionMode.multi)
    {
        url += "multi/";
    }
    else
    {
        url += "single/";
    }
    if (all)
    {
        url += "all/";
    }

    HttpWebRequest request = (HttpWebRequest)WebRequest.Create(url + this.clientId);
    request.Method = "POST";
    request.Headers["x-itraff-hash"] = getMD5(this.apiKey, byteArr);
    request.ContentType = "image/jpeg";
    request.Accept = "application/json";
    request.ContentLength = byteArr.Length;

    //send request
    using (Stream requestStream = request.GetRequestStream())
    {
        requestStream.Write(byteArr, 0, byteArr.Length);
    }

    //get response
    using (WebResponse response = request.GetResponse())
    using (Stream stream = response.GetResponseStream())
    using (StreamReader reader = new StreamReader(stream, Encoding.UTF8)) // BUG FIX: reader was not disposed
    {
        //read json response and deserialize into recognitionResponse
        String responseString = reader.ReadToEnd();
        var jss = new JavaScriptSerializer();
        var dict = jss.Deserialize<recognitionResponse>(responseString);
        return dict;
    }
}
/// <summary>
/// Transitions the scenario between states. Idle shuts the webcam down;
/// Recognizing starts the webcam stream first and falls back to Idle if
/// that fails. The visualization canvas is cleared on every transition.
/// </summary>
private async Task ChangeScenarioStateAsync(RecognitionMode newState)
{
    if (newState == RecognitionMode.Idle)
    {
        _currentState = newState;
        await ShutdownWebcamAsync();
        VisualizationCanvas.Children.Clear();
    }
    else if (newState == RecognitionMode.Recognizing)
    {
        bool webcamStarted = await StartWebcamStreamingAsync();
        if (!webcamStarted)
        {
            // Couldn't open the camera — drop back to idle.
            await ChangeScenarioStateAsync(RecognitionMode.Idle);
            return;
        }
        VisualizationCanvas.Children.Clear();
        _currentState = newState;
    }
}
/// <summary>Sets the active recognition mode.</summary>
/// <param name="InpMode">Mode to switch to.</param>
public void ChangeMode(RecognitionMode InpMode) => Mode = InpMode;
// P/Invoke into the native SmartAR library: constructs the native recognizer
// and returns its opaque handle. The [DllImport] attribute for this declaration
// is outside the visible chunk.
private static extern IntPtr sarSmartar_SarRecognizer_SarRecognizer(IntPtr smart, RecognitionMode recogMode, SceneMappingInitMode initMode);
/// <summary>
/// Switches the engine into synchronous recognition mode and logs readiness.
/// </summary>
private void PrepareForSynchronousRecognition()
{
    _recognitionMode = RecognitionMode.Synchronous;
    LogLine("Ready for synchronous recognition");
}
/// <summary>
/// Sends image recognition request.
/// </summary>
/// <param name="imagePath">Path to the image file.</param>
/// <param name="mode">Recognize mode.</param>
/// <param name="all">If true, requests all matches instead of the best one.</param>
/// <returns>Server response.</returns>
public recognitionResponse recognize(string imagePath, RecognitionMode mode, bool all)
{
    // BUG FIX: read the whole file reliably (Stream.Read may return fewer bytes
    // than requested) and make sure the stream is closed even when the limit
    // check throws (it previously leaked on that path).
    byte[] data = File.ReadAllBytes(imagePath);
    using (FileStream imageStream = File.OpenRead(imagePath))
    {
        // checkImageLimits decodes the image from the stream's current position,
        // so it must see the stream at offset 0.
        if (!checkImageLimits(imageStream, mode))
        {
            throw new Exception("Image Limits exception");
        }
    }

    // BUG FIX: the mode/all branching was commented out, so the parameters were
    // ignored and every request went to "single/all/". Restore the branching to
    // match the other recognize overload.
    string url = "http://recognize.im/v2/recognize/";
    if (mode == RecognitionMode.multi)
    {
        url += "multi/";
    }
    else
    {
        url += "single/";
    }
    if (all)
    {
        url += "all/";
    }

    HttpWebRequest request = (HttpWebRequest)WebRequest.Create(url + this.clientId);
    request.Method = "POST";
    request.Headers["x-itraff-hash"] = getMD5(this.apiKey, data);
    request.ContentType = "image/jpeg";
    request.Accept = "application/json";
    request.ContentLength = data.Length;

    //send request
    using (Stream requestStream = request.GetRequestStream())
    {
        requestStream.Write(data, 0, data.Length);
    }

    //get response
    using (WebResponse response = request.GetResponse())
    using (Stream stream = response.GetResponseStream())
    using (StreamReader reader = new StreamReader(stream, Encoding.UTF8))
    {
        //read json response and deserialize into recognitionResponse
        String responseString = reader.ReadToEnd();
        var jss = new JavaScriptSerializer();
        var dict = jss.Deserialize<recognitionResponse>(responseString);
        return dict;
    }
}
/// <summary>
/// Sends image recognition request.
/// </summary>
/// <param name="imagePath">Path to the image file.</param>
/// <param name="mode">Recognize mode.</param>
/// <param name="all">If true, requests all matches instead of the best one.</param>
/// <returns>Server response.</returns>
public recognitionResponse recognize(string imagePath, RecognitionMode mode, bool all)
{
    // BUG FIX: the stream was read manually (Stream.Read may return fewer bytes
    // than requested, which was unchecked), was never closed when the limit check
    // threw, and was left positioned at EOF when handed to checkImageLimits,
    // which decodes the image from the current position.
    byte[] data = File.ReadAllBytes(imagePath);
    using (FileStream imageStream = File.OpenRead(imagePath))
    {
        if (!checkImageLimits(imageStream, mode))
        {
            throw new Exception("Image Limits exception");
        }
    }

    //create request
    string url = "http://recognize.im/v2/recognize/";
    if (mode == RecognitionMode.multi)
    {
        url += "multi/";
    }
    else
    {
        url += "single/";
    }
    if (all)
    {
        url += "all/";
    }

    HttpWebRequest request = (HttpWebRequest)WebRequest.Create(url + this.clientId);
    request.Method = "POST";
    request.Headers["x-itraff-hash"] = getMD5(this.apiKey, data);
    request.ContentType = "image/jpeg";
    request.Accept = "application/json";
    request.ContentLength = data.Length;

    //send request
    using (Stream requestStream = request.GetRequestStream())
    {
        requestStream.Write(data, 0, data.Length);
    }

    //get response
    using (WebResponse response = request.GetResponse())
    using (Stream stream = response.GetResponseStream())
    using (StreamReader reader = new StreamReader(stream, Encoding.UTF8)) // BUG FIX: reader was not disposed
    {
        //read json response and deserialize into recognitionResponse
        String responseString = reader.ReadToEnd();
        var jss = new JavaScriptSerializer();
        var dict = jss.Deserialize<recognitionResponse>(responseString);
        return dict;
    }
}
/// <summary>
/// Changes recognition mode
/// </summary>
/// <returns>Server response</returns>
public Dictionary<string, string> modeChange(RecognitionMode mode)
{
    // Pick the SOAP payload for the requested mode; anything that is not
    // multi-object recognition falls back to single-object.
    string body;
    if (mode == RecognitionMode.multi)
    {
        body = "<mode xsi:type=\"xsd:string\">Multi</mode>";
    }
    else
    {
        body = "<mode xsi:type=\"xsd:string\">Single</mode>";
    }
    return sendSoapRequest(body, "modeChange");
}
/// <summary>
/// Checks the image limits for given recognize mode.
/// </summary>
/// <param name="imageStream">Image file stream, positioned at the start of the image data.</param>
/// <param name="mode">Recognize mode.</param>
/// <returns>True if the image fits the size/dimension limits for the mode.</returns>
public bool checkImageLimits(FileStream imageStream, RecognitionMode mode)
{
    // BUG FIX: the decoded Image is IDisposable and was leaked; dispose it.
    using (Image image = Image.FromStream(imageStream))
    {
        double imageSurface = (double)(image.Height * image.Width) / 1000000.0; // megapixels
        double fileSize = imageStream.Length / 1000.0;                          // kilobytes

        if (mode == RecognitionMode.single)
        {
            if (fileSize > SINGLEIR_MAX_FILE_SIZE ||
                image.Height < SINGLEIR_MIN_DIMENSION ||
                image.Width < SINGLEIR_MIN_DIMENSION ||
                imageSurface < SINGLEIR_MIN_IMAGE_SURFACE ||
                imageSurface > SINGLEIR_MAX_IMAGE_SURFACE)
            {
                return false;
            }
        }
        else if (mode == RecognitionMode.multi)
        {
            if (fileSize > MULTIPLEIR_MAX_FILE_SIZE ||
                image.Height < MULTIPLEIR_MIN_DIMENSION ||
                image.Width < MULTIPLEIR_MIN_DIMENSION ||
                imageSurface < MULTIPLEIR_MIN_IMAGE_SURFACE ||
                imageSurface > MULTIPLEIR_MAX_IMAGE_SURFACE)
            {
                return false;
            }
        }
        return true;
    }
}
/// <summary>
/// Sets the active recognition mode and records the screen dimensions
/// used for coordinate mapping.
/// </summary>
/// <param name="InpMode">Mode to switch to.</param>
/// <param name="ScreenWidth">Primary screen width in pixels.</param>
/// <param name="ScreenHeight">Primary screen height in pixels.</param>
public void ChangeMode(RecognitionMode InpMode, int ScreenWidth, int ScreenHeight)
{
    PrimaryScreenWidth = ScreenWidth;
    PrimaryScreenHeight = ScreenHeight;
    Mode = InpMode;
}