/**
 * Creates and starts the camera source backed by a face detector.
 * NOTE(review): the original header said "barcode detector" — that text was copied
 * from the barcode sample; this method builds a FaceDetector with classification
 * enabled and a MultiProcessor whose tracker factory is this instance.
 */
private void CreateCameraSource()
{
    var context = Application.Context;

    // Classification (eyes-open / smiling probabilities) is requested; landmarks are not.
    FaceDetector detector = new FaceDetector.Builder(context)
        .SetClassificationType(ClassificationType.All)
        .Build();

    // "this" serves as the MultiProcessor.IFactory: each detected face gets its own tracker.
    detector.SetProcessor(
        new MultiProcessor.Builder(this)
            .Build());

    if (!detector.IsOperational)
    {
        // Note: The first time that an app using face API is installed on a device, GMS will
        // download a native library to the device in order to do detection. Usually this
        // completes before the app is run for the first time. But if that download has not yet
        // completed, then the above call will not detect any faces.
        //
        // IsOperational can be used to check if the required native library is currently
        // available. The detector will automatically become operational once the library
        // download completes on device.
        Log.Warn(TAG, "Face detector dependencies are not yet available.");
    }

    // 640x480 front-camera preview at 30fps feeds frames into the detector.
    mCameraSource = new CameraSource.Builder(context, detector)
        .SetRequestedPreviewSize(640, 480)
        .SetFacing(CameraFacing.Front)
        .SetRequestedFps(30.0f)
        .Build();
}
/// <summary>
/// Initializes the face-tracker UI: inflates the layout, builds an accurate-mode
/// face detector with all landmarks, and wires it to a front-facing camera source.
/// </summary>
/// <param name="bundle">Saved activity state, forwarded to the base class.</param>
protected override void OnCreate(Bundle bundle)
{
    base.OnCreate(bundle);
    SetContentView(Resource.Layout.FaceTracker);

    _cameraSourcePreview = FindViewById<CameraSourcePreview>(Resource.Id.cameraSourcePreview);
    _overlay = FindViewById<GraphicOverlay>(Resource.Id.faceOverlay);

    // Accurate mode + all landmarks: slower per frame but better quality results.
    var detector = new FaceDetector.Builder(Application.Context)
        //.SetTrackingEnabled(false)
        .SetLandmarkType(LandmarkDetectionType.All)
        .SetMode(FaceDetectionMode.Accurate)
        .Build();

    detector.SetProcessor(
        new MultiProcessor.Builder(new FaceTrackerFactory(_overlay)).Build());

    // CONSISTENCY FIX: the other camera-setup methods in this codebase warn when the
    // native face-detection library has not finished downloading; without the check a
    // silent "no faces detected" state is hard to diagnose. The detector becomes
    // operational automatically once the download completes.
    if (!detector.IsOperational)
    {
        Android.Util.Log.Warn("FaceTracker", "Face detector dependencies are not yet available.");
    }

    _cameraSource = new CameraSource.Builder(this, detector)
        .SetAutoFocusEnabled(true)
        //.SetRequestedPreviewSize(640, 480)
        .SetFacing(CameraFacing.Front)
        .SetRequestedFps(30.0f)
        .Build();
}
/**
 * Creates the face detector and the camera source, honoring the current
 * front/back camera selection (mIsFrontFacing).
 */
private void createCameraSource()
{
    Context context = Application.Context;

    FaceDetector detector = new FaceDetector.Builder(context)
        .SetClassificationType(ClassificationType.All)
        .Build();

    detector.SetProcessor(
        new Builder(this)
            .Build());

    // BUG FIX: the computed facing was previously cast to int and then ignored —
    // the builder was hard-coded to CameraFacing.Front, so mIsFrontFacing had no
    // effect. Pass the enum straight through instead.
    CameraFacing facing = mIsFrontFacing ? CameraFacing.Front : CameraFacing.Back;

    mCameraSource = new CameraSource.Builder(context, detector)
        .SetFacing(facing)
        .SetRequestedPreviewSize(640, 480)
        .SetRequestedFps(60.0f)
        .SetAutoFocusEnabled(true)
        .Build();
}
/// <summary>
/// Sets up the main layout and a default face detector that feeds per-face
/// trackers (via GraphicFaceTrackerFactory) drawing onto the overlay, then
/// builds a rear-camera source at 640x480 / 30fps.
/// </summary>
/// <param name="bundle">Saved activity state, forwarded to the base class.</param>
protected override void OnCreate(Bundle bundle)
{
    base.OnCreate(bundle);
    SetContentView(Resource.Layout.Main);

    mPreview = FindViewById<CameraSourcePreview>(Resource.Id.preview);
    mGraphicOverlay = FindViewById<GraphicOverlay>(Resource.Id.faceOverlay);

    var faceDetector = new FaceDetector.Builder(Application.Context).Build();
    var trackerFactory = new GraphicFaceTrackerFactory(mGraphicOverlay);
    faceDetector.SetProcessor(new MultiProcessor.Builder(trackerFactory).Build());

    if (!faceDetector.IsOperational)
    {
        // GMS downloads the native face-detection library the first time an app using
        // the face API is installed; until that download completes no faces will be
        // detected. IsOperational reports availability, and the detector becomes
        // operational automatically once the library download finishes on the device.
        Android.Util.Log.Warn(TAG, "Face detector dependencies are not yet available.");
    }

    mCameraSource = new CameraSource.Builder(Application.Context, faceDetector)
        .SetRequestedPreviewSize(640, 480)
        .SetFacing(CameraFacing.Back)
        .SetRequestedFps(30.0f)
        .Build();
}
/// <summary>
/// Verifies that FaceDetector.Builder produces a non-null detector when configured
/// for accurate detection with tracking enabled and landmarks disabled.
/// </summary>
public void FaceDetector_Builder()
{
    var stillFaceDetector = new FaceDetector.Builder(Android.App.Application.Context)
        .SetTrackingEnabled(true)
        .SetMode(FaceDetectionMode.Accurate)
        .SetLandmarkType(LandmarkDetectionType.None)
        .Build();

    // Assert.NotNull states the intent directly and produces a clearer failure
    // message than Assert.True(x != null).
    Assert.NotNull(stillFaceDetector);

    // Release the detector's native resources so repeated test runs don't leak.
    stillFaceDetector.Release();
}
/// <summary>
/// Builds the face detector used by the camera source and attaches a processor
/// that follows the most prominent face with a GooglyFaceTracker.
/// </summary>
/// <param name="context">Context used by the detector builder.</param>
/// <returns>The configured (possibly not-yet-operational) detector.</returns>
private FaceDetector CreateFaceDetector(Context context)
{
    // Front camera: only the prominent face, larger minimum size (selfies fill the
    // frame); back camera: smaller minimum face size to catch distant faces.
    FaceDetector detector = new FaceDetector.Builder(context)
        .SetLandmarkType(LandmarkDetectionType.All)
        .SetClassificationType(ClassificationType.All)
        .SetTrackingEnabled(true)
        .SetMode(FaceDetectionMode.Fast)
        .SetProminentFaceOnly(mIsFrontFacing)
        .SetMinFaceSize(mIsFrontFacing ? 0.35f : 0.15f)
        .Build();

    // SIMPLIFICATION: the original if/else on mIsFrontFacing had byte-identical
    // branches — the MultiProcessor intended for the back camera (one tracker per
    // face, see the commented-out Java-style factory that was never ported) was
    // replaced by the same largest-face processor in both arms.
    // TODO: supply a MultiProcessor.IFactory for the back camera if multi-face
    // tracking is wanted there.
    Tracker tracker = new GooglyFaceTracker(mGraphicOverlay);
    Detector.IProcessor processor =
        new LargestFaceFocusingProcessor.Builder(detector, tracker).Build();
    detector.SetProcessor(processor);

    if (!detector.IsOperational)
    {
        // The native library may still be downloading on first run; the detector
        // becomes operational automatically once the download completes.
        Log.Warn(TAG, "Face detector dependencies are not yet available.");
        // NOTE(review): this toast reuses the low-storage resource string even though
        // the condition here is "dependencies unavailable"; the commented-out
        // low-storage broadcast check below was never ported. Confirm the intended
        // message before shipping.
        Toast.MakeText(this, Resource.String.low_storage_error, ToastLength.Long).Show();
        /*
         * IntentFilter lowStorageFilter = (IntentFilter)ActionDeviceStorageLow;
         * bool hasLowStorage = RegisterReceiver(null, lowStorageFilter) != null;
         *
         * if (hasLowStorage) {
         *     Toast.MakeText(this, Resource.String.low_storage_error, ToastLength.Long).Show();
         *     Log.Warn(TAG, GetString(Resource.String.low_storage_error));
         * }
         */
    }

    return detector;
}
/// <summary>
/// Builds a default face detector and a 640x480 / 30fps camera source for the
/// requested facing. This instance acts as the MultiProcessor tracker factory.
/// </summary>
/// <param name="cameraFacing">Which camera (front or back) to open.</param>
public void CreateCameraSource(CameraFacing cameraFacing)
{
    var faceDetector = new FaceDetector.Builder(this).Build();
    var processor = new MultiProcessor.Builder(this).Build();
    faceDetector.SetProcessor(processor);

    if (!faceDetector.IsOperational)
    {
        Log.Warn(TAG, "Face detector isn't configured yet");
    }

    cameraSource = new CameraSource.Builder(this, faceDetector)
        .SetRequestedPreviewSize(640, 480)
        .SetFacing(cameraFacing)
        .SetAutoFocusEnabled(true)
        .SetRequestedFps(30.0f)
        .Build();
}
/// <summary>
/// Creates a classification-enabled face detector and a rear-camera source
/// (60fps, autofocus) driving it. This instance is the MultiProcessor factory.
/// </summary>
private void CreateCameraSource()
{
    var appContext = Application.Context;

    // Request classification (smile / eyes-open probabilities) from the detector.
    var faceDetector = new FaceDetector.Builder(appContext)
        .SetClassificationType(ClassificationType.All)
        .Build();

    faceDetector.SetProcessor(new MultiProcessor.Builder(this).Build());

    if (!faceDetector.IsOperational)
    {
        // The first-run native library download may not have completed yet; the
        // detector becomes operational automatically once it has.
        Log.Warn(TAG, "Face detector dependencies are not yet available.");
    }

    mCameraSource = new CameraSource.Builder(appContext, faceDetector)
        .SetFacing(CameraFacing.Back)
        .SetRequestedFps(60.0f)
        .SetAutoFocusEnabled(true)
        .Build();
}
/// <summary>
/// Runs accurate single-face landmark/classification detection on the captured
/// bitmap and forwards the results to DetectedResponse, showing a progress dialog
/// for the duration and an error dialog on failure.
/// </summary>
private void DetectFace()
{
    OnboardingActivity.ShowProgressDialog();
    try
    {
        // Accurate mode with all landmarks/classifications, restricted to the most
        // prominent face; tracking is off because this is a single still image.
        FaceDetector detector = new FaceDetector.Builder(Context)
            .SetMode(FaceDetectionMode.Accurate)
            .SetClassificationType(ClassificationType.All)
            .SetLandmarkType(LandmarkDetectionType.All)
            .SetProminentFaceOnly(true)
            .SetTrackingEnabled(false)
            .Build();

        if (!detector.IsOperational)
        {
            // NOTE(review): nothing happens here — the progress dialog stays open and
            // the detector is never released in this branch. Confirm the intended
            // fallback when the native library hasn't finished downloading.
            //Handle contingency
        }
        else
        {
            Frame frame = new Frame.Builder()
                .SetBitmap(bitmap)
                .Build();
            var faces = detector.Detect(frame);
            // Free the detector's native resources; the detection results remain usable.
            detector.Release();
            // NOTE(review): presumably DetectedResponse closes the progress dialog on
            // success — only the catch block closes it here. Verify against that method.
            DetectedResponse(bitmap, faces);
        }
        SaveImage();
    }
    catch (Exception e)
    {
        OnboardingActivity.CloseProgressDialog();
        OnboardingActivity.ShowErrorDialog(e.Message);
    }
}
/// <summary>
/// Inflates the face-detector layout and wires a default detector, whose per-face
/// graphics come from TrackingFacesGraphics trackers, to a front-camera source.
/// </summary>
/// <param name="bundle">Saved activity state, forwarded to the base class.</param>
protected override void OnCreate(Bundle bundle)
{
    base.OnCreate(bundle);
    SetContentView(Resource.Layout.FaceDetector);

    preview = FindViewById<CameraPreview>(Resource.Id.preview);
    graphicOverlay = FindViewById<GraphicOverlay>(Resource.Id.faceOverlay);

    var faceDetector = new FaceDetector.Builder(Application.Context).Build();
    var trackerFactory = new TrackingFacesGraphics(graphicOverlay);
    faceDetector.SetProcessor(new MultiProcessor.Builder(trackerFactory).Build());

    if (!faceDetector.IsOperational)
    {
        //TODO Face detection is not available
    }

    cameraSource = new CameraSource.Builder(Application.Context, faceDetector)
        .SetRequestedPreviewSize(640, 480)
        .SetFacing(CameraFacing.Front)
        .SetRequestedFps(30.0f)
        .Build();
}
/// <summary>
/// Builds the face detector and camera source, optionally wrapping the detector in
/// a CustomFaceDetector that captures per-frame data for later compression.
/// </summary>
/// <param name="usecustomdetector">When true, route frames through CustomFaceDetector.</param>
private void CreateCameraSource(bool usecustomdetector)
{
    if (camface == CameraFacing.Front) //fixes darkness issues for front, may help back camera also
    {
        _recordFps = 30;
    }

    var context = Application.Context;

    FaceDetector detector = new FaceDetector.Builder(context) //consider moving to background thread
        .SetTrackingEnabled(true)
        .SetClassificationType(ClassificationType.All)
        .SetProminentFaceOnly(true)
        .SetMinFaceSize((float)0.2)
        .Build();

    if (usecustomdetector)
    {
        try
        {
            _allFrameData = new SortedList<float, FrameData>();
            CompressDataTasks = new List<Task>();
            var myFaceDetector = new CustomFaceDetector(detector, ref _allFrameData, ref CompressDataTasks, compressquality);
            //myFaceDetector.PropertyChanged += OnPropertyChanged;

            myFaceDetector.SetProcessor(
                new LargestFaceFocusingProcessor.Builder(myFaceDetector, new GraphicFaceTracker(this.mGraphicOverlay))
                    .Build());

            if (!myFaceDetector.IsOperational)
            {
                // IsOperational can be used to check if the required native library is
                // currently available. The detector will automatically become operational
                // once the library download completes on device.
                Log.Warn(TAG, "Face detector dependencies are not yet available.");
            }

            mCameraSource = new CameraSource.Builder(context, myFaceDetector)
                .SetRequestedPreviewSize(pFramewidth, pFrameHeight)
                .SetFacing(camface)
                .SetRequestedFps(_recordFps)
                .SetAutoFocusEnabled(true)
                .Build();
        }
        catch (Exception e)
        {
            // BUG FIX: the exception was previously swallowed silently, leaving
            // mCameraSource null with no diagnostic. Log it so failures are visible;
            // callers must still tolerate a null camera source in this path.
            Log.Error(TAG, "Failed to create custom face detector camera source: " + e);
        }
    }
    else
    {
        detector.SetProcessor(
            new LargestFaceFocusingProcessor.Builder(detector, new GraphicFaceTracker(this.mGraphicOverlay))
                .Build());

        if (!detector.IsOperational)
        {
            // The detector becomes operational automatically once the native library
            // download completes on device.
            Log.Warn(TAG, "Face detector dependencies are not yet available.");
        }

        mCameraSource = new CameraSource.Builder(context, detector)
            .SetRequestedPreviewSize(pFramewidth, pFrameHeight)
            .SetFacing(camface)
            .SetRequestedFps(_recordFps)
            .SetAutoFocusEnabled(true)
            .Build();
    }
}
/// <summary>
/// Wires up the face-decoration view: loads the base face bitmap, detects facial
/// landmarks on demand, and draws goggles / flowers / cigarette overlays at the
/// detected positions. Returns the view it was given.
/// </summary>
/// <param name="givenView">Inflated layout containing the image, buttons, and grid.</param>
/// <param name="Resources">Resources used to decode the drawable bitmaps.</param>
/// <returns>The same view instance, fully wired.</returns>
public static View getProcessView(View givenView, Android.Content.Res.Resources Resources)
{
    faceImage = (ImageView)givenView.FindViewById(Resource.Id.ImageViewFace);

    if (faceBitmap == null)
    {
        // First call: decode the default face, scale it, and prepare a mutable
        // canvas-backed copy that overlays are drawn onto.
        faceBitmap = BitmapFactory.DecodeResource(Resources, Resource.Drawable.memo);
        faceBitmap = changeBitmapSize(faceBitmap, 1500);
        faceImage.SetImageBitmap(faceBitmap);
        tempBitmap = Bitmap.CreateBitmap(faceBitmap.Width, faceBitmap.Height, Bitmap.Config.Rgb565);
        canvas = new Canvas(tempBitmap);
        canvas.DrawBitmap(faceBitmap, 0, 0, null);
    }

    faceImage.SetImageDrawable(new BitmapDrawable(Resources, tempBitmap));

    // Lazily decode the overlay bitmaps once.
    if (flowers == null)
    {
        flowers = BitmapFactory.DecodeResource(Resources, Resource.Drawable.flowers);
    }
    if (goggles == null)
    {
        goggles = BitmapFactory.DecodeResource(Resources, Resource.Drawable.glass3);
    }
    if (smoke == null)
    {
        smoke = BitmapFactory.DecodeResource(Resources, Resource.Drawable.cigarette);
    }

    buttonProcess = (Button)givenView.FindViewById(Resource.Id.buttonProcess);
    buttonClear = (Button)givenView.FindViewById(Resource.Id.buttonClear);
    buttonSave = (ImageButton)givenView.FindViewById(Resource.Id.buttonSave);

    AlertDialog.Builder builder = new AlertDialog.Builder(givenView.Context)
        .SetTitle("Save the image")
        .SetMessage("Are you sure to save?")
        .SetPositiveButton("Yes", (senderAlert, args) =>
        {
            saveImage();
            if (isCompressSuccessful)
            {
                Toast.MakeText(givenView.Context, "Successfuly saved!", ToastLength.Short).Show();
            }
            else
            {
                Toast.MakeText(givenView.Context, "Failed!", ToastLength.Short).Show();
            }
        })
        .SetNegativeButton("No", (senderAlert, args) => { });

    // NOTE(review): handlers are re-subscribed every time this method runs; if it is
    // called more than once per view the delegates will fire multiple times.
    buttonProcess.Click += delegate
    {
        positions.Clear();

        FaceDetector faceDetector = new FaceDetector.Builder(Android.App.Application.Context)
            .SetTrackingEnabled(true)
            .SetLandmarkType(LandmarkDetectionType.All)
            .SetMode(FaceDetectionMode.Fast)
            .Build();

        if (!faceDetector.IsOperational)
        {
            // BUG FIX: the toast was built but .Show() was never called, so the
            // error was silently dropped.
            Toast.MakeText(givenView.Context, "Error about faceDetector.", ToastLength.Long).Show();
            return;
        }

        Frame frame = new Frame.Builder().SetBitmap(faceBitmap).Build();
        SparseArray sparseArray = faceDetector.Detect(frame);

        for (int i = 0; i < sparseArray.Size(); i++)
        {
            Face face = (Face)sparseArray.ValueAt(i);
            DetectLandMarks(face);
        }

        // BUG FIX: release the detector's native resources once the landmark
        // positions have been extracted; previously it was never released.
        faceDetector.Release();

        // Goggles: centered between the eyes, scaled to twice the eye distance.
        int leftEyeX = positions.Find(x => x.id == "leftEye").positionX;
        int rightEyeX = positions.Find(x => x.id == "rightEye").positionX;
        int rightEyeY = positions.Find(x => x.id == "rightEye").positionY;
        int eyeWidth = leftEyeX - rightEyeX;
        goggles = changeBitmapSize(goggles, eyeWidth * 2);
        int glassScaleHeight = goggles.GetScaledHeight(canvas);
        canvas.DrawBitmap(goggles, rightEyeX - (eyeWidth / 2) + 5, rightEyeY - (glassScaleHeight / 2) - 2, null);

        // Flowers: above the head, offset by the eye-to-mouth distance.
        flowers = changeBitmapSize(flowers, eyeWidth * 2);
        int flowersScaleWidth = flowers.GetScaledWidth(canvas);
        int mouthY = positions.Find(x => x.id == "bottomMouth").positionY;
        int noseX = positions.Find(x => x.id == "nose").positionX;
        int eyeMouthDifference = mouthY - rightEyeY;
        canvas.DrawBitmap(flowers, noseX - (flowersScaleWidth / 2), rightEyeY - (glassScaleHeight / 2) - eyeMouthDifference, null);

        // Cigarette: anchored near the right corner of the mouth.
        smoke = changeBitmapSize(smoke, eyeWidth * 2);
        int rightMouthX = positions.Find(x => x.id == "rightMouth").positionX;
        int rightMouthY = positions.Find(x => x.id == "rightMouth").positionY;
        int cigaretteScaleWidth = smoke.GetScaledWidth(canvas);
        canvas.DrawBitmap(smoke, rightMouthX - (cigaretteScaleWidth - (cigaretteScaleWidth / 4)), rightEyeY - (glassScaleHeight / 2) + (eyeMouthDifference - (eyeMouthDifference / 8) - (eyeMouthDifference / 8)), null);

        faceImage.SetImageDrawable(new BitmapDrawable(Resources, tempBitmap));
    };

    buttonClear.Click += delegate { clearCanvas(); };

    buttonSave.Click += delegate
    {
        var myCustomDialog = builder.Create();
        myCustomDialog.Show();
    };

    var gridView4 = givenView.FindViewById<GridView>(Resource.Id.gridView4);
    gridView4.Adapter = new Adapters.GridviewAdapters.peopleGridviewAdapter(givenView.Context);
    gridView4.ItemClick += delegate (object sender, AdapterView.ItemClickEventArgs args)
    {
        // Swap in the tapped person's bitmap and rebuild the drawing canvas.
        int id = (int)args.Id;
        faceBitmap = BitmapFactory.DecodeResource(Resources, id);
        faceBitmap = changeBitmapSize(faceBitmap, 1500);
        faceImage.SetImageBitmap(faceBitmap);
        tempBitmap = Bitmap.CreateBitmap(faceBitmap.Width, faceBitmap.Height, Bitmap.Config.Rgb565);
        canvas = new Canvas(tempBitmap);
        canvas.DrawBitmap(faceBitmap, 0, 0, null);
    };

    return givenView;
}