/// <summary>
/// Validates the source argument and delegates to the handle-based overload of InvokeFill.
/// </summary>
/// <param name="source">The media source to fill. Must not be null.</param>
/// <param name="config">The fill configuration; a null value maps to a null native handle.</param>
/// <param name="area">Optional rectangular area to restrict the fill to.</param>
/// <exception cref="ArgumentNullException"><paramref name="source"/> is null.</exception>
private void InvokeFill(MediaVisionSource source, ImageFillConfiguration config, Rectangle? area)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    var configHandle = EngineConfiguration.GetHandle(config);

    // The interop call returns an error code; Validate throws on failure.
    var ret = InvokeFill(source.Handle, configHandle, area);
    ret.Validate("Failed to fill the image object");
}
/// <summary>
/// Returns the native handle of the given configuration.
/// A null configuration is allowed and maps to <see cref="IntPtr.Zero"/>.
/// </summary>
/// <param name="config">The configuration whose handle is requested. May be null.</param>
/// <exception cref="ObjectDisposedException"><paramref name="config"/> has already been disposed of.</exception>
internal static IntPtr GetHandle(EngineConfiguration config)
{
    // Callers pass null to mean "no engine configuration".
    if (config == null)
    {
        return IntPtr.Zero;
    }

    // Never hand out a handle that has already been released.
    if (config._disposed)
    {
        throw new ObjectDisposedException(config.GetType().Name);
    }

    return config._handle;
}
/// <summary>
/// Detects barcodes on the source and reads the message from it with <see cref="BarcodeDetectionConfiguration"/>.
/// </summary>
/// <param name="source">The <see cref="MediaVisionSource"/> instance.</param>
/// <param name="roi">Region of interest - rectangular area on the source which will be used for
/// barcode detection. Note that roi should be inside area on the source.</param>
/// <param name="config">The configuration of the barcode detector. This value can be null.</param>
/// <returns>A task that represents the asynchronous detect operation.</returns>
/// <exception cref="ArgumentNullException"><paramref name="source"/> is null.</exception>
/// <exception cref="NotSupportedException">The feature is not supported.</exception>
/// <exception cref="ObjectDisposedException">
/// <paramref name="source"/> already has been disposed of.<br/>
/// -or-<br/>
/// <paramref name="config"/> already has been disposed of.
/// </exception>
/// <seealso cref="Barcode"/>
/// <since_tizen> 4 </since_tizen>
public static async Task<IEnumerable<Barcode>> DetectAsync(MediaVisionSource source,
    Rectangle roi, BarcodeDetectionConfiguration config)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    // RunContinuationsAsynchronously keeps the native completion callback thread
    // from synchronously inlining the awaiting continuation when the TCS is set.
    var tcs = new TaskCompletionSource<IEnumerable<Barcode>>(
        TaskCreationOptions.RunContinuationsAsynchronously);

    // ObjectKeeper pins the callback delegate so it stays alive for the native call.
    using (var cb = ObjectKeeper.Get(GetCallback(tcs)))
    {
        InteropBarcode.Detect(source.Handle, EngineConfiguration.GetHandle(config),
            roi.ToMarshalable(), cb.Target).Validate("Failed to detect barcode.");

        return await tcs.Task;
    }
}
/// <summary>
/// Detects faces on the source.<br/>
/// Each time when DetectAsync is called, a set of the detected faces at the media source are received asynchronously.
/// </summary>
/// <param name="source">The source of the media where faces will be detected.</param>
/// <param name="config">The configuration of engine will be used for detecting. This value can be null.</param>
/// <returns>A task that represents the asynchronous detect operation.</returns>
/// <exception cref="ArgumentNullException"><paramref name="source"/> is null.</exception>
/// <exception cref="NotSupportedException">The feature is not supported.</exception>
/// <feature>http://tizen.org/feature/vision.face_recognition</feature>
/// <since_tizen> 4 </since_tizen>
public static async Task<Rectangle[]> DetectAsync(MediaVisionSource source,
    FaceDetectionConfiguration config)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    // RunContinuationsAsynchronously keeps the native completion callback thread
    // from synchronously inlining the awaiting continuation when the TCS is set.
    var tcs = new TaskCompletionSource<Rectangle[]>(
        TaskCreationOptions.RunContinuationsAsynchronously);

    // ObjectKeeper pins the callback delegate so it stays alive for the native call.
    using (var cb = ObjectKeeper.Get(GetCallback(tcs)))
    {
        InteropFace.Detect(source.Handle, EngineConfiguration.GetHandle(config), cb.Target)
            .Validate("Failed to perform face detection");

        return await tcs.Task;
    }
}
/// <summary>
/// Validates the arguments and invokes the native barcode image generation.
/// </summary>
/// <param name="config">The generation configuration; a null value maps to a null native handle.</param>
/// <param name="message">The message to encode. Must not be null.</param>
/// <param name="type">The barcode type; must be a defined <see cref="BarcodeType"/> value.</param>
/// <param name="imageConfig">Output image settings (size, path, format). Must not be null.</param>
/// <param name="qrMode">QR encoding mode passed through to the native API.</param>
/// <param name="qrEcc">QR error correction level passed through to the native API.</param>
/// <param name="qrVersion">QR version passed through to the native API.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="message"/> or <paramref name="imageConfig"/> is null.
/// </exception>
private static void GenerateImage(BarcodeGenerationConfiguration config, string message,
    BarcodeType type, BarcodeImageConfiguration imageConfig, int qrMode, int qrEcc, int qrVersion)
{
    if (message == null)
    {
        throw new ArgumentNullException(nameof(message));
    }

    if (imageConfig == null)
    {
        throw new ArgumentNullException(nameof(imageConfig));
    }

    ValidationUtil.ValidateEnum(typeof(BarcodeType), type);

    var configHandle = EngineConfiguration.GetHandle(config);

    // The interop call returns an error code; Validate throws on failure.
    var ret = InteropBarcode.GenerateImage(configHandle, message, imageConfig.Width,
        imageConfig.Height, type, qrMode, qrEcc, qrVersion, imageConfig.Path,
        imageConfig.Format);
    ret.Validate("Failed to generate image");
}
/// <summary>
/// Tracks the given image tracking model on the current frame and <see cref="ImageTrackingConfiguration"/>.
/// </summary>
/// <param name="source">The current image of sequence where the image tracking model will be tracked.</param>
/// <param name="trackingModel">The image tracking model which processed as target of tracking.</param>
/// <param name="config">The configuration used for tracking. This value can be null.</param>
/// <returns>A task that represents the asynchronous tracking operation.</returns>
/// <exception cref="ArgumentNullException">
/// <paramref name="source"/> is null.<br/>
/// -or-<br/>
/// <paramref name="trackingModel"/> is null.
/// </exception>
/// <exception cref="NotSupportedException">The feature is not supported.</exception>
/// <exception cref="ObjectDisposedException">
/// <paramref name="source"/> has already been disposed of.<br/>
/// -or-<br/>
/// <paramref name="trackingModel"/> has already been disposed of.<br/>
/// -or-<br/>
/// <paramref name="config"/> has already been disposed of.
/// </exception>
/// <exception cref="ArgumentException"><paramref name="trackingModel"/> has no target.</exception>
/// <seealso cref="ImageTrackingModel.SetTarget(ImageObject)"/>
/// <feature>http://tizen.org/feature/vision.image_recognition</feature>
/// <since_tizen> 4 </since_tizen>
public static async Task<Quadrangle> TrackAsync(MediaVisionSource source,
    ImageTrackingModel trackingModel, ImageTrackingConfiguration config)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    if (trackingModel == null)
    {
        throw new ArgumentNullException(nameof(trackingModel));
    }

    // RunContinuationsAsynchronously keeps the native completion callback thread
    // from synchronously inlining the awaiting continuation when the TCS is set.
    var tcs = new TaskCompletionSource<Quadrangle>(
        TaskCreationOptions.RunContinuationsAsynchronously);

    // ObjectKeeper pins the callback delegate so it stays alive for the native call.
    using (var cb = ObjectKeeper.Get(GetCallback(tcs)))
    {
        InteropImage.Track(source.Handle, trackingModel.Handle,
            EngineConfiguration.GetHandle(config), cb.Target)
            .Validate("Failed to perform image tracking.");

        return await tcs.Task;
    }
}
/// <summary>
/// Validates the arguments and runs the native face recognition asynchronously.
/// </summary>
/// <param name="source">The media source containing the face(s) to recognize. Must not be null.</param>
/// <param name="recognitionModel">The learned recognition model. Must not be null.</param>
/// <param name="area">Optional rectangular area to restrict recognition to.</param>
/// <param name="config">The recognition configuration; a null value maps to a null native handle.</param>
/// <returns>A task that completes with the recognition result delivered by the native callback.</returns>
/// <exception cref="ArgumentNullException">
/// <paramref name="source"/> or <paramref name="recognitionModel"/> is null.
/// </exception>
private static async Task<FaceRecognitionResult> InvokeRecognizeAsync(MediaVisionSource source,
    FaceRecognitionModel recognitionModel, Rectangle? area, FaceRecognitionConfiguration config)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    if (recognitionModel == null)
    {
        throw new ArgumentNullException(nameof(recognitionModel));
    }

    // RunContinuationsAsynchronously keeps the native completion callback thread
    // from synchronously inlining the awaiting continuation when the TCS is set.
    var tcs = new TaskCompletionSource<FaceRecognitionResult>(
        TaskCreationOptions.RunContinuationsAsynchronously);

    // ObjectKeeper pins the callback delegate so it stays alive for the native call.
    using (var cb = ObjectKeeper.Get(GetRecognizedCallback(tcs)))
    {
        InvokeRecognize(source.Handle, recognitionModel.Handle,
            EngineConfiguration.GetHandle(config), cb.Target, area)
            .Validate("Failed to perform face recognition.");

        return await tcs.Task;
    }
}
/// <summary>
/// Learns the face recognition model with <see cref="FaceRecognitionConfiguration"/>.
/// </summary>
/// <remarks>
/// Before you start the learning process, face recognition models have to be filled with the training data - face image examples.
/// These examples have to be provided by <see cref="Add(MediaVisionSource, int)"/> or <see cref="Add(MediaVisionSource, int, Rectangle)"/>.
/// Recognition accuracy is usually increased when the different examples of the identical faces are added more and more.
/// But it depends on the used learning algorithm.
/// </remarks>
/// <param name="config">The configuration used for learning of the recognition models. This value can be null.</param>
/// <exception cref="ObjectDisposedException">
/// The <see cref="FaceRecognitionModel"/> has already been disposed of.<br/>
/// -or-<br/>
/// <paramref name="config"/> has already been disposed of.
/// </exception>
/// <exception cref="InvalidOperationException">No examples added.</exception>
/// <seealso cref="Add(MediaVisionSource, int)"/>
/// <seealso cref="Add(MediaVisionSource, int, Rectangle)"/>
/// <since_tizen> 4 </since_tizen>
public void Learn(FaceRecognitionConfiguration config)
{
    var configHandle = EngineConfiguration.GetHandle(config);

    // The interop call returns an error code; Validate throws on failure.
    var ret = InteropModel.Learn(configHandle, Handle);
    ret.Validate("Failed to learn");
}