/// <summary>
/// Text recognition on the cloud. If you want to use the cloud text analyzer,
/// you need to apply for an agconnect-services.json file
/// in the developer alliance (https://developer.huawei.com/consumer/en/doc/development/HMS-Guides/ml-add-agc)
/// and add agconnect-services.json to the Assets folder of the project.
/// Results are routed to RemoteDisplaySuccess / DisplayFailure; failures are logged.
/// </summary>
private async void RemoteAnalyzer()
{
    // Set the list of languages to be recognized.
    IList<string> languageList = new List<string>();
    languageList.Add("zh");
    languageList.Add("en");

    // Create an analyzer. You can customize the analyzer by creating MLRemoteTextSetting.
    MLRemoteTextSetting setting = new MLRemoteTextSetting.Factory()
        .SetTextDensityScene(MLRemoteTextSetting.OcrCompactScene)
        .SetLanguageList(languageList)
        .SetBorderType(MLRemoteTextSetting.Arc)
        .Create();
    this.analyzer = MLAnalyzerFactory.Instance.GetRemoteTextAnalyzer(setting);
    // Use default parameter settings.
    //analyzer = MLAnalyzerFactory.Instance.RemoteTextAnalyzer;

    // Create an MLFrame by using Android.Graphics.Bitmap.
    Bitmap bitmap = BitmapFactory.DecodeResource(this.Resources, Resource.Drawable.text_image);
    MLFrame frame = MLFrame.FromBitmap(bitmap);

    try
    {
        // Await the analysis directly. After a successful await the task is
        // guaranteed to be completed, so the former Task.IsCompleted check was
        // redundant, and reading Task.Result in async code is an anti-pattern.
        MLText result = await this.analyzer.AnalyseFrameAsync(frame);
        if (result != null)
        {
            // Analyze success.
            this.RemoteDisplaySuccess(result);
        }
        else
        {
            // Analyze failure.
            Log.Info(Tag, " Analyze failure ");
        }
    }
    catch (Exception e)
    {
        // Operation failure (e.g. network error, missing agconnect-services.json).
        Log.Info(Tag, " Operation failure: " + e.Message);
        this.DisplayFailure(e);
    }
}
/// <summary>
/// Text recognition on the device.
/// Recognizes characters in a bundled drawable image using an on-device
/// analyzer configured for English; results are routed to
/// DisplaySuccess / DisplayFailure, and failures are logged.
/// </summary>
private async void LocalAnalyzer()
{
    // Create the text analyzer MLTextAnalyzer to recognize characters in images.
    // You can set MLLocalTextSetting to specify languages that can be recognized.
    // If you do not set the languages, only Romance languages can be recognized by default.

    // Use default parameter settings to configure the on-device text analyzer.
    // Only Romance languages can be recognized.
    // analyzer = MLAnalyzerFactory.Instance.LocalTextAnalyzer;

    // Use the customized parameter MLLocalTextSetting to configure the text analyzer on the device.
    MLLocalTextSetting setting = new MLLocalTextSetting.Factory()
        .SetOCRMode(MLLocalTextSetting.OcrDetectMode)
        .SetLanguage("en")
        .Create();
    this.analyzer = MLAnalyzerFactory.Instance.GetLocalTextAnalyzer(setting);

    // Create an MLFrame by using Android.Graphics.Bitmap.
    Bitmap bitmap = BitmapFactory.DecodeResource(Resources, Resource.Drawable.text_image);
    MLFrame frame = MLFrame.FromBitmap(bitmap);

    try
    {
        // Await the analysis directly. After a successful await the task is
        // guaranteed to be completed, so the former Task.IsCompleted check was
        // redundant, and reading Task.Result in async code is an anti-pattern.
        MLText result = await this.analyzer.AnalyseFrameAsync(frame);
        if (result != null)
        {
            // Analyze success.
            this.DisplaySuccess(result);
        }
        else
        {
            // Analyze failure.
            Log.Info(Tag, " Analyze failure ");
        }
    }
    catch (Exception e)
    {
        // Operation failure.
        Log.Info(Tag, " Operation failure: " + e.Message);
        this.DisplayFailure(e);
    }
}