Example #1
        /// <inheritdoc />
        /// <exception cref="ArgumentNullException">
        /// <paramref name="source"/> is <b>null</b>.
        /// </exception>
        public PointsOfInterestFeatures BuildFeatures(ImageSource source, CancellationToken cancellationToken)
        {
            if (source == null)
            {
                throw new ArgumentNullException(nameof(source));
            }

            // Apply the configured preprocessing options to the source image.
            Image image = ImagePreprocessing.Process(source.Image, this.ImagePreprocessingOptions, 8);

            // Binarize the image, clean up scanning overscan, then deskew and despeckle.
            image = image
                    .Convert8To1(null, 128)
                    .CleanOverscan(0.5f, 0.5f)
                    .Deskew(null)
                    .Despeckle(null);

            // Find connected components and remove the smallest ones, which are most likely noise.
            ISet<ConnectedComponent> components = image.FindConnectedComponents(8);

            image.RemoveConnectedComponents(components.Where(x => x.Power <= 16));

            // Crop to the content area, convert back to 8 bits per pixel,
            // then erode, downsample, and smooth before feature detection.
            image = image
                    .CropBlackArea(0, 0)
                    .Convert1To8(null)
                    .Erode(null, StructuringElement.Square(2), 1, BorderType.BorderRepl, 0)
                    .ScaleByDownsampling2(null)
                    .FilterLowpass(null, 3, BorderType.BorderRepl, 0);

            FeatureDetectors.Features features = this.detector.Detect(image, cancellationToken);

            return new PointsOfInterestFeatures(source.Id, features);
        }
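
For reference, a minimal calling sketch follows; the concrete builder type name (PointsOfInterestFeatureBuilder) and the LoadImageSource helper are assumptions used only for illustration.

        // Hypothetical usage sketch: the builder type and the image-loading helper are assumed.
        using (var cancellation = new CancellationTokenSource())
        {
            var builder = new PointsOfInterestFeatureBuilder();        // assumed concrete type
            ImageSource source = LoadImageSource("page-0001.tif");     // assumed helper
            PointsOfInterestFeatures features = builder.BuildFeatures(source, cancellation.Token);
        }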
Example #2
        private async void OnTouchEffectAction(object sender, TouchActionEventArgs args)
        {
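            // App.IsWorks acts as a simple guard flag: once a touch has been handled here,
            // further touches are ignored until the flag is reset (for example when a new photo is taken).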
            if (!App.IsWorks)
            {
                App.IsWorks = true;
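                // Convert the touch location from view units to SKCanvasView pixel coordinates.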
                var     viewPoint = args.Location;
                SKPoint point     =
                    new SKPoint((float)(canvasView.CanvasSize.Width * viewPoint.X / canvasView.Width),
                                (float)(canvasView.CanvasSize.Height * viewPoint.Y / canvasView.Height));

                var actionType = args.Type;
                _touchGestureRecognizer.ProcessTouchEvent(args.Id, actionType, point);

                ICommanderReceivedData commanderReceivedData = new ImageStored();
                RequestClass           requestClass          = RequestClass.Instance(commanderReceivedData);

                // Send the tapped point and the current image to the backend and take back the processed image.
                ImageData image = new ImageData();
                image.CoordinateXY = new int[] { (int)point.X, (int)point.Y };
                image.Base64       = Convert.ToBase64String(targetImageByte);
                image.Id           = 1;
                image = requestClass.SendAndTakeImage(image);

                if (image.Base64 != null)
                {
                    targetImageByte = Convert.FromBase64String(image.Base64);
                    imgCam.Source   = ImagePreprocessing.GetImageSourceFromByteArray(targetImageByte);
                }
                else
                {
                    IsBusy = false;
                }

                // Populate the current report with the measurements returned by the backend.
                CurrentReport.Instance.Date = DateTime.UtcNow;
                CurrentReport.Instance.Length  = Math.Round(double.Parse(image.Matlab.Height), 2);
                CurrentReport.Instance.Width   = Math.Round(double.Parse(image.Matlab.Width), 2);
                CurrentReport.Instance.Surface = Math.Round(double.Parse(image.Matlab.Arena), 2);
                CurrentReport.Instance.GranulationTissuePercentage = Math.Round(double.Parse(image.Matlab.MatrixC1), 2);
                CurrentReport.Instance.SludgePercentage            = Math.Round(double.Parse(image.Matlab.MatrixC2), 2);
                CurrentReport.Instance.NecrosisPercentage          = Math.Round(double.Parse(image.Matlab.MatrixC3), 2);
                var secondPage = new NewItemPage();

                await Navigation.PushAsync(secondPage);
            }
        }
Example #3
        /// <summary>
        /// Fills the image filter properties from the rendering data source, if one is present.
        /// </summary>
        /// <param name="model">The image filter component model.</param>
        private void FillImageFilterProperties(ImageFilterComponentModel model)
        {
            if (string.IsNullOrEmpty(model?.Rendering?.DataSource))
            {
                return;
            }

            var item = Sitecore.Context.Database.GetItem(new Sitecore.Data.ID(model.Rendering.DataSource));

            // Sitecore checkbox fields store "1" when checked; treat that value as true.
            model.HasImageCaption     = !string.IsNullOrEmpty(item.Fields["HasImageCaption"]?.Value) && int.Parse(item.Fields["HasImageCaption"].Value) == 1;
            model.HasImageDescription = !string.IsNullOrEmpty(item.Fields["HasImageDescription"]?.Value) && int.Parse(item.Fields["HasImageDescription"].Value) == 1;

            // The raw Image field value embeds the media item ID as a GUID in braces; extract it.
            var imageValue = item.Fields["Image"].Value;
            var firstLeft  = imageValue.IndexOf("{", StringComparison.InvariantCultureIgnoreCase);
            var firstRight = imageValue.IndexOf("}", StringComparison.InvariantCultureIgnoreCase);

            model.Id = new Sitecore.Data.ID(imageValue.Substring(firstLeft, firstRight - firstLeft + 1));

            var itemImage = Sitecore.Context.Database.GetItem(model.Id);
            var mediaItem = new Sitecore.Data.Items.MediaItem(itemImage);
            var image     = new System.Drawing.Bitmap(mediaItem.GetMediaStream());

            // Save the original image as a base64 string
            model.OriginalImage = $"data:image/png;base64,{Convert.ToBase64String(ImagePreprocessing.ConvertImageToArray(image))}";
            model.Filter        = item.Fields["Filter"].Value;
            model.Angle         = string.IsNullOrEmpty(item.Fields["Angle"].Value) ? 0 : int.Parse(item.Fields["Angle"].Value);
            model.Height        = string.IsNullOrEmpty(item.Fields["Height"].Value) ? image.Height : int.Parse(item.Fields["Height"].Value);
            model.Width         = string.IsNullOrEmpty(item.Fields["Width"].Value) ? image.Width : int.Parse(item.Fields["Width"].Value);

            // Set the image filter parameters and process the image
            var filterParameters = new ImageFilterParameters
            {
                Image  = image,
                Filter = (EnumImageFilter)Enum.Parse(typeof(EnumImageFilter), model.Filter),
                Angle  = model.Angle,
                Height = model.Height,
                Width  = model.Width
            };
            var filterImage = ImagePreprocessing.ApplyFilter(filterParameters);

            // Save the filtered image as a base64 string
            model.FilterImage = $"data:image/png;base64,{Convert.ToBase64String(ImagePreprocessing.ConvertImageToArray(filterImage))}";
        }
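
For reference, the same filter call can be exercised outside Sitecore. In the sketch below the EnumImageFilter member and the input path are assumptions; the ImageFilterParameters shape and the ImagePreprocessing calls are taken from the snippet above.

        // Hypothetical standalone sketch; the filter value and the input path are assumed.
        var bitmap = new System.Drawing.Bitmap("input.png");
        var parameters = new ImageFilterParameters
        {
            Image  = bitmap,
            Filter = EnumImageFilter.Grayscale,   // assumed enum member
            Angle  = 0,
            Height = bitmap.Height,
            Width  = bitmap.Width
        };
        var filtered = ImagePreprocessing.ApplyFilter(parameters);
        var base64   = $"data:image/png;base64,{Convert.ToBase64String(ImagePreprocessing.ConvertImageToArray(filtered))}";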
Example #4
        private async void BtnCam_Clicked(object sender, EventArgs e)
        {
            try
            {
                App.IsWorks = false;
                var photo = await CrossMedia.Current.TakePhotoAsync(new StoreCameraMediaOptions()
                {
                    DefaultCamera = Plugin.Media.Abstractions.CameraDevice.Rear,
                    Directory     = "Xamarin",
                    SaveToAlbum   = true
                });

                if (photo != null)
                {
                    this.Title      = "Choose a central point of a wound";
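                    // Keep the captured photo as a byte array for later processing and as an ImageSource for display.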
                    targetImageByte = ImagePreprocessing.GetByteArrayFromStream(photo.GetStream());
                    var imagesource = ImagePreprocessing.GetImageSourceFromByteArray(targetImageByte);
                    CurrentReport.Instance.StandardImagePath = photo.AlbumPath;
                    imgCam.Source = imagesource;
                }
            }
            catch (Exception ex)
            {
                await DisplayAlert("Error", ex.Message, "Ok");
            }
        }