protected override void OnCreate(Bundle savedInstanceState)
{
    Log.Info(TAG, "called onCreate");
    base.OnCreate(savedInstanceState);

    ISharedPreferences preference = Android.Preferences.PreferenceManager.GetDefaultSharedPreferences(ApplicationContext);

    // Copy the Haar cascade assets to app storage so the native classifiers can load them from a file path.
    FileInfo eyeFile = Emgu.Util.AndroidFileAsset.WritePermanantFileAsset(this, "haarcascade_eye.xml", "cascade", 0);
    FileInfo faceFile = Emgu.Util.AndroidFileAsset.WritePermanantFileAsset(this, "haarcascade_frontalface_alt_tree.xml", "cascade", 0);

    try
    {
        FACE = new CascadeClassifier(faceFile.FullName);
        EYE = new CascadeClassifier(eyeFile.FullName);
    }
    catch (IOException e)
    {
        Log.Error(TAG, "Failed to load cascade. Exception thrown: " + e.StackTrace);
    }

    RequestWindowFeature(WindowFeatures.NoTitle);
    Window.AddFlags(WindowManagerFlags.Fullscreen);
    Window.AddFlags(WindowManagerFlags.KeepScreenOn);

    SetContentView(Resource.Layout.color_blob_detection_surface_view);

    mOpenCvCameraView = FindViewById<CameraBridgeViewBase>(Resource.Id.color_blob_detection_activity_surface_view);
    mOpenCvCameraView.Visibility = ViewStates.Visible;
    mOpenCvCameraView.SetCvCameraViewListener2(this);

    mLoaderCallback = new Callback(this, this);
}
protected override void OnCreate(Bundle savedInstanceState)
{
    base.OnCreate(savedInstanceState);

    Window.AddFlags(WindowManagerFlags.KeepScreenOn);
    SetContentView(Resource.Layout.CameraPreview);

    _openCvCameraView = FindViewById<CameraBridgeViewBase>(Resource.Id.surfaceView);
    _openCvCameraView.Visibility = ViewStates.Visible;
    _openCvCameraView.SetCvCameraViewListener(this);
}
protected override void OnCreate(Bundle savedInstanceState) { Log.Info(TAG, "called onCreate"); base.OnCreate(savedInstanceState); Window.AddFlags(WindowManagerFlags.KeepScreenOn); SetContentView(Resource.Layout.ImageManipulation); mOpenCvCameraView = FindViewById <CameraBridgeViewBase>(Resource.Id.ManipulationView); mOpenCvCameraView.Visibility = ViewStates.Visible; mOpenCvCameraView.SetCvCameraViewListener2(this); mLoaderCallback = new Callback(this, mOpenCvCameraView); }
protected override void OnCreate(Bundle savedInstanceState)
{
    base.OnCreate(savedInstanceState);

    Window.AddFlags(WindowManagerFlags.KeepScreenOn);

    Log.Debug(ActivityTags.Puzzle, "Creating and setting view");
    _openCvCameraView = new JavaCameraView(this, -1) as CameraBridgeViewBase;
    SetContentView(_openCvCameraView);
    _openCvCameraView.Visibility = ViewStates.Visible;
    _openCvCameraView.SetCvCameraViewListener(this);

    _puzzle15 = new Puzzle15Processor();
    _puzzle15.PrepareNewGame();
}
protected override void OnDestroy()
{
    base.OnDestroy();

    if (_openCvCameraView != null)
    {
        _openCvCameraView.DisableView();
        _openCvCameraView = null;
        _openCV = null;
        rgba = null;
        roi = null;
    }
}
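The OnDestroy above tears the preview down; the usual counterpart in the OpenCV4Android samples is an OnResume/OnPause pair that re-initialises OpenCV and starts or stops the preview with the activity lifecycle. A minimal sketch, assuming the binding exposes OpenCVLoader.InitDebug(), EnableView() and DisableView() as used elsewhere in these snippets (the "OpenCV" log tag is illustrative):

protected override void OnResume()
{
    base.OnResume();
    // Sketch only: static initialisation; projects that load OpenCV Manager
    // asynchronously would call OpenCVLoader.InitAsync with a loader callback instead.
    if (OpenCVLoader.InitDebug())
    {
        _openCvCameraView?.EnableView();
    }
    else
    {
        Log.Error("OpenCV", "OpenCV initialization failed");
    }
}

protected override void OnPause()
{
    base.OnPause();
    // Stop the preview whenever the activity leaves the foreground.
    _openCvCameraView?.DisableView();
}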
/// <summary>Called when the activity is first created.</summary>
protected override void OnCreate(Bundle savedInstanceState)
{
    Log.Info(ActivityTags.FaceDetect, "called onCreate");
    base.OnCreate(savedInstanceState);

    Window.AddFlags(WindowManagerFlags.KeepScreenOn);
    SetContentView(Resource.Layout.face_detect_surface_view);

    mOpenCvCameraView = FindViewById<CameraBridgeViewBase>(Resource.Id.fd_activity_surface_view);
    mOpenCvCameraView.Visibility = ViewStates.Visible;
    mOpenCvCameraView.SetCvCameraViewListener2(this);

    mLoaderCallback = new Callback(this, this, mOpenCvCameraView);
}
protected override void OnCreate(Bundle savedInstanceState) { Log.Info("BlobDetection", "called onCreate"); base.OnCreate(savedInstanceState); RequestWindowFeature(WindowFeatures.NoTitle); Window.AddFlags(WindowManagerFlags.KeepScreenOn); SetContentView(Resource.Layout.color_blob_detection_surface_view); mOpenCvCameraView = FindViewById <CameraBridgeViewBase>(Resource.Id.color_blob_detection_activity_surface_view); mOpenCvCameraView.Visibility = ViewStates.Visible; mOpenCvCameraView.SetCvCameraViewListener2(this); mLoaderCallback = new Callback(this, this); }
protected override async void OnCreate(Bundle savedInstanceState)
{
    TabLayoutResource = Resource.Layout.Tabbar;
    ToolbarResource = Resource.Layout.Toolbar;
    base.OnCreate(savedInstanceState);

    // Initialize platform plugins.
    MobileBarcodeScanner.Initialize(this.Application);
    SmsAndroid.dev = this;
    await CrossMedia.Current.Initialize();
    CrossFingerprint.SetCurrentActivityResolver(() => CrossCurrentActivity.Current.Activity);
    CrossCurrentActivity.Current.Init(this, savedInstanceState);
    LocalNotificationsImplementation.NotificationIconId = Resource.Drawable.icon;
    Xamarin.Essentials.Platform.Init(this, savedInstanceState);
    global::Xamarin.Forms.Forms.Init(this, savedInstanceState);
    Xamarin.FormsMaps.Init(this, savedInstanceState);
    CachedImageRenderer.Init(true);

    // NFC setup.
    NfcManager nfcManager = (NfcManager)Android.App.Application.Context.GetSystemService(Context.NfcService);
    NFCdevice = nfcManager.DefaultAdapter;
    CrossNFC.Init(this);
    Xamarin.Forms.DependencyService.Register<INfcForms, NfcForms>();
    x = Xamarin.Forms.DependencyService.Get<INfcForms>() as NfcForms;

#if portifolio
    CrossBadge.Current.SetBadge(9);
    LoadApplication(new App(new SoapService(), new OpenCVActivity(), new SmsAndroid()));
#else
    if (!OpenCVLoader.InitDebug())
    {
        System.Console.WriteLine("Init OpenCV failed!!");
    }
    else
    {
        System.Console.WriteLine("Init OpenCV succeeded!!");
    }

    // Set our view from the "main" layout resource.
    SetContentView(Resource.Layout.Main);

    mOpenCvCameraView = FindViewById<CameraBridgeViewBase>(Resource.Id.fd_activity_surface_view);
    mOpenCvCameraView.Visibility = ViewStates.Visible;
    mOpenCvCameraView.SetCvCameraViewListener2(this);

    mLoaderCallback = new Callback(this, this, mOpenCvCameraView);
#endif
}
protected override void OnCreate(Bundle savedInstanceState) { Log.Info(TAG, "called onCreate"); base.OnCreate(savedInstanceState); Window.AddFlags(WindowManagerFlags.KeepScreenOn); RequestWindowFeature(WindowFeatures.NoTitle); SetContentView(Resource.Layout.activity_cameraview); RequestedOrientation = Android.Content.PM.ScreenOrientation.Landscape; //Check if permission is already granted if (Android.Support.V4.Content.ContextCompat.CheckSelfPermission(this, Android.Manifest.Permission.Camera) != Android.Content.PM.Permission.Granted) { // Give first an explanation, if needed. if (Android.Support.V4.App.ActivityCompat.ShouldShowRequestPermissionRationale(this, Android.Manifest.Permission.Camera)) { // Show an explanation to the user *asynchronously* -- don't block // this thread waiting for the user's response! After the user // sees the explanation, try again to request the permission. } else { // No explanation needed, we can request the permission. Android.Support.V4.App.ActivityCompat.RequestPermissions(this, new String[] { Android.Manifest.Permission.Camera }, 1); } } mOpenCvCameraView = FindViewById <CameraBridgeViewBase>(Resource.Id.cameraview_id); mOpenCvCameraView.Visibility = ViewStates.Visible; mOpenCvCameraView.SetCvCameraViewListener(this); mLoaderCallback = new Callback(this, mOpenCvCameraView); // Force fullscreen Window.DecorView.SystemUiVisibility = Android.Views.StatusBarVisibility.Hidden; Window.AddFlags(WindowManagerFlags.Fullscreen); Window.ClearFlags(WindowManagerFlags.ForceNotFullscreen); }
protected override async void OnCreate(Bundle savedInstanceState)
{
    TabLayoutResource = Resource.Layout.Tabbar;
    ToolbarResource = Resource.Layout.Toolbar;
    base.OnCreate(savedInstanceState);

    // Initialize platform plugins.
    MobileBarcodeScanner.Initialize(this.Application);
    await CrossMedia.Current.Initialize();
    CrossFingerprint.SetCurrentActivityResolver(() => CrossCurrentActivity.Current.Activity);
    CrossCurrentActivity.Current.Init(this, savedInstanceState);
    LocalNotificationsImplementation.NotificationIconId = Resource.Drawable.icon;
    Xamarin.Essentials.Platform.Init(this, savedInstanceState);
    global::Xamarin.Forms.Forms.Init(this, savedInstanceState);
    Xamarin.FormsMaps.Init(this, savedInstanceState);
    CachedImageRenderer.Init(true);

    if (!OpenCVLoader.InitDebug())
    {
        System.Console.WriteLine("Init OpenCV failed!!");
    }
    else
    {
        System.Console.WriteLine("Init OpenCV succeeded!!");
    }

    // Set our view from the "main" layout resource.
    SetContentView(Resource.Layout.Main);

    mOpenCvCameraView = FindViewById<CameraBridgeViewBase>(Resource.Id.fd_activity_surface_view);
    mOpenCvCameraView.Visibility = ViewStates.Visible;
    mOpenCvCameraView.SetCvCameraViewListener2(this);

    mLoaderCallback = new Callback(this, this, mOpenCvCameraView);
}
public Callback(Context context, CameraBridgeViewBase cameraView)
    : base(context)
{
    mOpenCvCameraView = cameraView;
}
public Callback(OpenCVActivity activity, Context context, CameraBridgeViewBase view)
    : base(context)
{
    _activity = activity;
    mOpenCvCameraView = view;
}
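Both constructors only stash the camera view; in the OpenCV4Android pattern it is the loader callback's OnManagerConnected override that finally enables it once the library has loaded. A minimal sketch, assuming the Xamarin binding mirrors the Java names BaseLoaderCallback.OnManagerConnected and LoaderCallbackInterface.Success:

public override void OnManagerConnected(int status)
{
    // Sketch only: member and constant names follow the stock OpenCV4Android samples.
    if (status == LoaderCallbackInterface.Success)
    {
        // OpenCV is ready; it is now safe to start the camera preview.
        mOpenCvCameraView.EnableView();
    }
    else
    {
        base.OnManagerConnected(status);
    }
}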
protected override async void OnCreate(Bundle savedInstanceState)
{
    base.OnCreate(savedInstanceState);

    // Set the view.
    SetContentView(Resource.Layout.TestArea);

    // OpenCV camera view.
    mOpenCvCameraView = FindViewById<CameraBridgeViewBase>(Resource.Id.TestGreyView);
    mOpenCvCameraView.Visibility = ViewStates.Visible;
    mOpenCvCameraView.SetCvCameraViewListener2(this);
    mLoaderCallback = new Callback(this, mOpenCvCameraView);

    // Text views.
    textseekThresh = FindViewById<TextView>(Resource.Id.TestAreaTextSeek1);
    textseekBlur = FindViewById<TextView>(Resource.Id.TestAreaTextSeek2);
    textseekSize = FindViewById<TextView>(Resource.Id.TestAreaTextSeek3);
    textResult = FindViewById<TextView>(Resource.Id.TestAreaResultText);

    // Buttons.
    buttonDetectText = FindViewById<Button>(Resource.Id.TestAreaButtonDetectText);
    buttonSelectImageGallery = FindViewById<Button>(Resource.Id.TestAreaSelectImageGallery);
    buttonExtractText = FindViewById<Button>(Resource.Id.TestAreaButtonExtractText);
    buttonGrey = FindViewById<Button>(Resource.Id.TestAreaButtonGrey);
    buttonSize = FindViewById<Button>(Resource.Id.TestAreaButtonSize);

    // Image views.
    imgInput = FindViewById<ImageView>(Resource.Id.TestAreaImageView);
    imgResult = FindViewById<ImageView>(Resource.Id.TestAreaImageResultGrey);

    // Event listeners.
    buttonGrey.Click += delegate
    {
        Bitmap img = ((BitmapDrawable)imgInput.Drawable).Bitmap;
        double thresh = Convert.ToDouble(textseekThresh.Text);
        double blur = Convert.ToDouble(textseekBlur.Text);
        Bitmap result = ImageOp.greyImg(img, thresh, blur);
        imgResult.SetImageBitmap(result);
    };

    buttonDetectText.Click += delegate
    {
        Bitmap img = ((BitmapDrawable)imgResult.Drawable).Bitmap;
        Bitmap result = ImageOp.detectTextRect(img);
        imgResult.SetImageBitmap(result);
    };

    buttonExtractText.Click += async delegate
    {
        Bitmap img = ((BitmapDrawable)imgResult.Drawable).Bitmap;
        string result = await ImageOp.detectAndExtractText(img);
        textResult.Text = result;
    };

    buttonSize.Click += delegate
    {
        Bitmap img = ((BitmapDrawable)imgInput.Drawable).Bitmap;
        double size = Convert.ToDouble(textseekSize.Text) / 100;
        Bitmap imgTemp = ImageOp.resizeImage(img, size, size);
        double thresh = Convert.ToDouble(textseekThresh.Text);
        double blur = Convert.ToDouble(textseekBlur.Text);
        Bitmap result = ImageOp.greyImg(imgTemp, thresh, blur);
        imgResult.SetImageBitmap(result);
    };

    buttonSelectImageGallery.Click += delegate
    {
        var imageIntent = new Intent();
        imageIntent.SetType("image/*");
        imageIntent.SetAction(Intent.ActionGetContent);
        StartActivityForResult(Intent.CreateChooser(imageIntent, "Select photo"), 0);
    };

    // Sliders.
    seekThresh = FindViewById<SeekBar>(Resource.Id.TestAreaSeekBar1);
    seekBlur = FindViewById<SeekBar>(Resource.Id.TestAreaSeekBar2);
    seekSize = FindViewById<SeekBar>(Resource.Id.TestAreaSeekBar3);

    // Slider listeners.
    seekThresh.ProgressChanged += (object sender, SeekBar.ProgressChangedEventArgs e) =>
    {
        if (e.FromUser)
        {
            textseekThresh.Text = e.Progress.ToString();
        }
    };
    seekBlur.ProgressChanged += (object sender, SeekBar.ProgressChangedEventArgs e) =>
    {
        if (e.FromUser)
        {
            textseekBlur.Text = e.Progress.ToString();
        }
    };
    seekSize.ProgressChanged += (object sender, SeekBar.ProgressChangedEventArgs e) =>
    {
        if (e.FromUser)
        {
            textseekSize.Text = e.Progress.ToString();
        }
    };

    // Create the Tesseract API once at startup.
    api = new TesseractApi(this, AssetsDeployment.OncePerVersion);
    bool check = await OCR.initTes(api);
}