Ejemplo n.º 1
0
        public void Load(DicomFile file, StructureSet structureSet, IProgress <double> progress)
        {
            structureSet.FileName = file.File.Name;

            structureSet.Name = file.Dataset.GetSingleValueOrDefault <string>(DicomTag.StructureSetLabel, "");

            Dictionary <int, string> roi_names = new Dictionary <int, string>();
            DicomSequence            structs   = file.Dataset.GetSequence(DicomTag.StructureSetROISequence);

            foreach (DicomDataset item in structs)
            {
                roi_names.Add(item.GetSingleValue <int>(DicomTag.ROINumber), item.GetSingleValue <string>(DicomTag.ROIName));
            }

            DicomSequence s = file.Dataset.GetSequence(DicomTag.ROIContourSequence);

            //Track the item number to report progress
            double total = s.Items.Count;
            double num   = 0;

            foreach (DicomDataset item in s.Items)
            {
                num++;
                if (progress != null)
                {
                    progress.Report(100 * num / total);
                }

                RegionOfInterest roi = new RegionOfInterest();

                int[] color = new int[] { 0, 0, 0 };
                if (item.TryGetValues <int>(DicomTag.ROIDisplayColor, out int[] tmp))
Ejemplo n.º 2
0
        /// <summary>
        /// Connects to the SIGVerse rosbridge server and prepares the depth camera's
        /// image and camera-info messages for publishing.
        /// </summary>
        void Start()
        {
            // Fall back to the shared configuration when no connection settings were given.
            if (this.rosbridgeIP.Equals(string.Empty))
            {
                this.rosbridgeIP = ConfigManager.Instance.configInfo.rosbridgeIP;
            }
            if (this.sigverseBridgePort == 0)
            {
                this.sigverseBridgePort = ConfigManager.Instance.configInfo.sigverseBridgePort;
            }

            this.tcpClient = new System.Net.Sockets.TcpClient(this.rosbridgeIP, this.sigverseBridgePort);

            this.networkStream = this.tcpClient.GetStream();

            // Generous 100s timeouts: image publishing can stall on slow links.
            this.networkStream.ReadTimeout  = 100000;
            this.networkStream.WriteTimeout = 100000;

            // Depth Camera
            this.xtionDepthCamera = SIGVerseUtils.FindTransformFromChild(this.transform.root, "camera_depth_optical_frame").GetComponentInChildren <Camera>();

            int imageWidth  = this.xtionDepthCamera.targetTexture.width;
            int imageHeight = this.xtionDepthCamera.targetTexture.height;

            // 2 bytes per pixel for the 16UC1 depth encoding. C# arrays are
            // zero-initialized by the runtime, so the explicit clearing loop that
            // used to follow this allocation was redundant and has been removed.
            this.byteArray = new byte[imageWidth * imageHeight * 2];

            this.imageTexture = new Texture2D(imageWidth, imageHeight, TextureFormat.RGB24, false);

            //  [camera/depth/CameraInfo]
            string distortionModel = "plumb_bob";

            // Distortion (none), intrinsic, rectification and projection matrices.
            double[]         D   = { 0.0, 0.0, 0.0, 0.0, 0.0 };
            double[]         K   = { 570.3422241210938, 0.0, 314.5, 0.0, 570.3422241210938, 235.5, 0.0, 0.0, 1.0 };
            double[]         R   = { 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 };
            double[]         P   = { 570.3422241210938, 0.0, 314.5, 0.0, 0.0, 570.3422241210938, 235.5, 0.0, 0.0, 0.0, 1.0, 0.0 };
            RegionOfInterest roi = new RegionOfInterest(0, 0, 0, 0, false);

            this.cameraInfoData = new CameraInfoForSIGVerseBridge(null, (uint)imageHeight, (uint)imageWidth, distortionModel, D, K, R, P, 0, 0, roi);

            //  [camera/depth/Image_raw]
            string encoding    = "16UC1";
            byte   isBigendian = 0;
            uint   step        = (uint)imageWidth * 2;

            this.imageData = new ImageForSIGVerseBridge(null, (uint)imageHeight, (uint)imageWidth, encoding, isBigendian, step, null);

            this.header = new Header(0, new SIGVerse.RosBridge.msg_helpers.Time(0, 0), "camera_depth_optical_frame");

            this.cameraInfoMsg = new SIGVerseRosBridgeMessage <CameraInfoForSIGVerseBridge>("publish", this.topicNameCameraInfo, CameraInfoForSIGVerseBridge.GetMessageType(), this.cameraInfoData);
            this.imageMsg      = new SIGVerseRosBridgeMessage <ImageForSIGVerseBridge>     ("publish", this.topicNameImage, ImageForSIGVerseBridge.GetMessageType(), this.imageData);
        }
Ejemplo n.º 3
0
        /// <summary>
        /// Builds an ELLIPSE region-of-interest entry with eight coordinate values,
        /// attaches a referenced image via an observation-media relationship, and
        /// writes the resulting XML plus each stored value to the test output.
        /// </summary>
        public void TestRegionOfInterest()
        {
            IRegionOfInterest v = new RegionOfInterest();

            v.Code.Code = "ELLIPSE";

            // Ellipse described by four (x, y) coordinate pairs.
            int[] coordinates = { 3, 1, 3, 7, 2, 4, 4, 4 };
            foreach (int coordinate in coordinates)
            {
                v.Value.Append().Value = coordinate;
            }

            v.XmlId = "MM1";

            // Link an observation-media entry pointing at the referenced image.
            var relationship = v.EntryRelationship.Append();
            relationship.AsObservationMedia.Value.MediaType       = "image/gif";
            relationship.AsObservationMedia.Value.Reference.Value = "lefthand.gif";

            TestContext.WriteLine(v.Xml);

            foreach (IRegionOfInterestvalue iv in v.Value)
            {
                TestContext.WriteLine("Value: {0}", iv.Value);
            }
        }
Ejemplo n.º 4
0
        /// <summary>
        /// Sets camera to focus on the passed in region of interest
        /// </summary>
        /// <param name="region">The region to focus on, or null to focus on the default region</param>
        /// <returns>The focus state reported by the focus control after focusing</returns>
        private async Task <MediaCaptureFocusState> FocusCamera(RegionOfInterest region)
        {
            var controller   = _mediaCapture.VideoDeviceController;
            var roiControl   = controller.RegionsOfInterestControl;
            var focusControl = controller.FocusControl;

            if (region == null)
            {
                // No region provided: clear any regions and reset focus.
                await roiControl.ClearRegionsAsync();
            }
            else
            {
                // Apply the caller-supplied region.
                await roiControl.SetRegionsAsync(new[] { region }, true);

                // Prefer full-range, single-shot focus when the hardware supports it,
                // otherwise fall back to whatever the device offers first.
                var supportedRanges = focusControl.SupportedFocusRanges;
                var supportedModes  = focusControl.SupportedFocusModes;

                var settings = new FocusSettings
                {
                    Mode           = supportedModes.Contains(FocusMode.Single) ? FocusMode.Single : supportedModes.FirstOrDefault(),
                    AutoFocusRange = supportedRanges.Contains(AutoFocusRange.FullRange) ? AutoFocusRange.FullRange : supportedRanges.FirstOrDefault(),
                };

                focusControl.Configure(settings);
            }

            await focusControl.FocusAsync();

            return focusControl.FocusState;
        }
Ejemplo n.º 5
0
        /// <summary>
        /// Re-lays out the mask, outline and corner-control sublayers whenever the
        /// view's geometry changes, without animating the transition.
        /// </summary>
        public override void LayoutSubviews()
        {
            base.LayoutSubviews();

            // Suppress implicit CoreAnimation animations so the sublayers jump
            // straight to their new positions.
            CATransaction.Begin();
            CATransaction.DisableActions = true;

            // Mask path uses the even-odd fill rule so the region of interest
            // itself is left unfilled (a "hole" in the dimming layer).
            var maskPath = UIBezierPath.FromRect(new CGRect(0, 0, Frame.Width, Frame.Height));
            maskPath.AppendPath(UIBezierPath.FromRect(RegionOfInterest));
            maskPath.UsesEvenOddFillRule = true;
            maskLayer.Path = maskPath.CGPath;

            regionOfInterestOutline.Path = CGPath.FromRect(RegionOfInterest);

            // Center each corner control on its corner, offset by the control radius.
            var radius = RegionOfInterestControlRadius;
            topLeftControl.Position     = RegionOfInterest.CornerTopLeft().WithOffsetX(-radius).WithOffsetY(-radius);
            topRightControl.Position    = RegionOfInterest.CornerTopRight().WithOffsetX(-radius).WithOffsetY(-radius);
            bottomLeftControl.Position  = RegionOfInterest.CornerBottomLeft().WithOffsetX(-radius).WithOffsetY(-radius);
            bottomRightControl.Position = RegionOfInterest.CornerBottomRight().WithOffsetX(-radius).WithOffsetY(-radius);

            CATransaction.Commit();
        }
        // </SnippetTapFocusPreviewControl>


        /// <summary>
        /// Focus the camera on the given rectangle of the preview, defined by the position and size parameters, in UI coordinates within the CaptureElement
        /// </summary>
        /// <param name="position">The position of the tap, to become the center of the focus rectangle</param>
        /// <param name="size">the size of the rectangle around the tap</param>
        /// <returns></returns>
        ///
        // <SnippetTapToFocus>
        public async Task TapToFocus(Point position, Size size)
        {
            _isFocused = true;

            // Map the UI tap into the preview stream's coordinate space.
            var previewRect  = GetPreviewStreamRectInControl();
            var focusPreview = ConvertUiTapToPreviewRect(position, size, previewRect);

            // Note that this Region Of Interest could be configured to also calculate exposure
            // and white balance within the region
            var regionOfInterest = new RegionOfInterest
            {
                AutoFocusEnabled = true,
                BoundsNormalized = true,
                Bounds           = focusPreview,
                Type             = RegionOfInterestType.Unknown,
                Weight           = 100,
            };

            var focusControl = _mediaCapture.VideoDeviceController.FocusControl;

            // Choose the widest supported focus range and single-shot mode when available.
            var supportedRanges = focusControl.SupportedFocusRanges;
            var supportedModes  = focusControl.SupportedFocusModes;

            var settings = new FocusSettings
            {
                Mode           = supportedModes.Contains(FocusMode.Single) ? FocusMode.Single : supportedModes.FirstOrDefault(),
                AutoFocusRange = supportedRanges.Contains(AutoFocusRange.FullRange) ? AutoFocusRange.FullRange : supportedRanges.FirstOrDefault(),
            };

            focusControl.Configure(settings);

            // Register the region, then run the focus operation.
            var roiControl = _mediaCapture.VideoDeviceController.RegionsOfInterestControl;
            await roiControl.SetRegionsAsync(new[] { regionOfInterest }, true);

            await focusControl.FocusAsync();
        }
Ejemplo n.º 7
0
        ///////////////////////////////////////////////////////////////////////
        //  Make a linear ramp of data and display it.
        ///////////////////////////////////////////////////////////////////////
        private void GenerateRamps()
        {
            // Frame 1: 200x400 pixels, values ramp 0..399 across each row.
            ushort[] frame1 = new ushort[200 * 400];
            for (int pix = 0; pix < frame1.Length; pix++)
            {
                frame1[pix] = (ushort)(pix % 400);
            }

            // Frame 2: 300x500 pixels, same ramp shape with a 1k bias.
            ushort[] frame2 = new ushort[300 * 500];
            for (int pix = 0; pix < frame2.Length; pix++)
            {
                frame2[pix] = (ushort)((pix % 500) + 1000);
            }

            // Get the addin file manager
            var datamgr = LightFieldApplication.DataManager;
            if (datamgr == null)
            {
                return;
            }

            RegionOfInterest roi  = new RegionOfInterest(0, 0, 400, 200, 1, 1);
            RegionOfInterest roi2 = new RegionOfInterest(0, 0, 500, 300, 1, 1);

            // Simple Single Region
            IImageDataSet imageData = datamgr.CreateImageDataSet(frame1, roi, ImageDataFormat.MonochromeUnsigned16);

            RegionOfInterest[] rois = { roi, roi2 };

            List <Array> buffers = new List <Array>();
            buffers.Add(frame1);
            buffers.Add(frame2);

            // Complex Set Containing Two Regions
            IImageDataSet imageDataSets = datamgr.CreateImageDataSet(buffers, rois, ImageDataFormat.MonochromeUnsigned16);

            IDisplay display = LightFieldApplication.DisplayManager;
            if (display != null)
            {
                // Data file compare mode with two horizontal windows.
                // (Note: the original comment said "3 Vertical Windows", which
                // did not match the TwoHorizontal layout actually requested.)
                display.ShowDisplay(DisplayLocation.ExperimentWorkspace, DisplayLayout.TwoHorizontal);

                // Modify the underlying data a little bit before displaying it.
                ModifyDataExample(imageData, 0);

                // Put the simple data in window 0
                IDisplayViewer view = display.GetDisplay(DisplayLocation.ExperimentWorkspace, 0);
                view.Display("SimpleRamp", imageData);

                // Modify the underlying data a little bit before displaying it.
                ModifyDataExample(imageDataSets, 1);

                // Put the complex data in window 1
                view = display.GetDisplay(DisplayLocation.ExperimentWorkspace, 1);
                view.Display("ComplexRamp", imageDataSets);
            }
        }
        /// <summary>
        /// Auto-focuses the camera, optionally centering a 20x20 region of
        /// interest on the given UI coordinates. Errors are logged, not thrown.
        /// </summary>
        public async Task AutoFocusAsync(int x, int y, bool useCoordinates)
        {
            if (!IsFocusSupported)
            {
                return;
            }

            var focusControl = mediaCapture.VideoDeviceController.FocusControl;
            var roiControl   = mediaCapture.VideoDeviceController.RegionsOfInterestControl;
            try
            {
                if (roiControl.AutoFocusSupported && roiControl.MaxRegions > 0)
                {
                    if (useCoordinates)
                    {
                        // Map the tap coordinates into the preview stream's coordinate space.
                        var props = mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview);
                        var previewEncodingProperties = GetPreviewResolution(props);
                        var previewRect  = GetPreviewStreamRectInControl(previewEncodingProperties, captureElement);
                        var focusPreview = ConvertUiTapToPreviewRect(new Point(x, y), new Size(20, 20), previewRect);

                        var regionOfInterest = new RegionOfInterest
                        {
                            AutoFocusEnabled = true,
                            BoundsNormalized = true,
                            Bounds           = focusPreview,
                            Type             = RegionOfInterestType.Unknown,
                            Weight           = 100
                        };
                        await roiControl.SetRegionsAsync(new[] { regionOfInterest }, true);

                        // Pick the widest range and single-shot mode the hardware offers.
                        var ranges = focusControl.SupportedFocusRanges;
                        var modes  = focusControl.SupportedFocusModes;

                        var settings = new FocusSettings
                        {
                            Mode           = modes.Contains(FocusMode.Single) ? FocusMode.Single : modes.FirstOrDefault(),
                            AutoFocusRange = ranges.Contains(AutoFocusRange.FullRange) ? AutoFocusRange.FullRange : ranges.FirstOrDefault(),
                        };

                        focusControl.Configure(settings);
                    }
                    else
                    {
                        // If no region provided, clear any regions and reset focus
                        await roiControl.ClearRegionsAsync();
                    }
                }

                await focusControl.FocusAsync();
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine("AutoFocusAsync Error: {0}", ex);
            }
        }
Ejemplo n.º 9
0
        /// <summary>
        /// Applies the user-defined custom region (built from the current ROI
        /// fields) to the experiment when a camera is present.
        /// </summary>
        private void CustomRegion_Checked(object sender, RoutedEventArgs e)
        {
            var regions = new RegionOfInterest[]
            {
                new RegionOfInterest(x_, y_, w_, h_, xb_, yb_),
            };

            if (CameraExist)
            {
                experiment_.SetCustomRegions(regions);
            }
        }
Ejemplo n.º 10
0
        // Connects to the SIGVerse rosbridge server and prepares the RGB camera's
        // image and camera-info messages for publishing.
        void Start()
        {
            // Fall back to the shared configuration when no connection settings were given.
            if (this.rosbridgeIP.Equals(string.Empty))
            {
                this.rosbridgeIP = ConfigManager.Instance.configInfo.rosbridgeIP;
            }
            if (this.sigverseBridgePort == 0)
            {
                this.sigverseBridgePort = ConfigManager.Instance.configInfo.sigverseBridgePort;
            }


            this.tcpClient = new System.Net.Sockets.TcpClient(this.rosbridgeIP, this.sigverseBridgePort);

            Debug.Log("Connected=" + this.tcpClient.Connected);

            this.networkStream = this.tcpClient.GetStream();

            // Generous 100s timeouts: image publishing can stall on slow links.
            this.networkStream.ReadTimeout  = 100000;
            this.networkStream.WriteTimeout = 100000;


            // RGB Camera
            this.xtionRGBCamera = this.transform.Find("Xtion_rgb").GetComponent <Camera>();

            int imageWidth  = this.xtionRGBCamera.targetTexture.width;
            int imageHeight = this.xtionRGBCamera.targetTexture.height;

            this.imageTexture = new Texture2D(imageWidth, imageHeight, TextureFormat.RGB24, false);


            //  [camera/rgb/CameraInfo]
            string distortionModel = "plumb_bob";

            // Distortion (none), intrinsic, rectification and projection matrices.
            double[]         D   = { 0.0, 0.0, 0.0, 0.0, 0.0 };
            double[]         K   = { 570.3422241210938, 0.0, 319.5, 0.0, 570.3422241210938, 239.5, 0.0, 0.0, 1.0 };
            double[]         R   = { 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 };
            double[]         P   = { 570.3422241210938, 0.0, 319.5, 0.0, 0.0, 570.3422241210938, 239.5, 0.0, 0.0, 0.0, 1.0, 0.0 };
            RegionOfInterest roi = new RegionOfInterest(0, 0, 0, 0, false);

            this.cameraInfoData = new CameraInfoForSIGVerseBridge(null, (uint)imageHeight, (uint)imageWidth, distortionModel, D, K, R, P, 0, 0, roi);

            //  [camera/rgb/Image_raw]
            // 3 bytes per pixel for the rgb8 encoding.
            string encoding    = "rgb8";
            byte   isBigendian = 0;
            uint   step        = (uint)imageWidth * 3;

            this.imageData = new ImageForSIGVerseBridge(null, (uint)imageHeight, (uint)imageWidth, encoding, isBigendian, step, null);

            this.header = new Header(0, new SIGVerse.RosBridge.msg_helpers.Time(0, 0), "camera_rgb_optical_frame");


            this.cameraInfoMsg = new SIGVerseRosBridgeMessage <CameraInfoForSIGVerseBridge>("publish", this.topicNameCameraInfo, CameraInfoForSIGVerseBridge.GetMessageType(), this.cameraInfoData);
            this.imageMsg      = new SIGVerseRosBridgeMessage <ImageForSIGVerseBridge>     ("publish", this.topicNameImage, ImageForSIGVerseBridge.GetMessageType(), this.imageData);
        }
Ejemplo n.º 11
0
 /// <summary>
 /// Combines the hash codes of the scaling parameters using the
 /// conventional multiply-by-397 accumulation pattern.
 /// </summary>
 public override int GetHashCode()
 {
     unchecked
     {
         int hash = RegionOfInterest.GetHashCode();
         hash = (hash * 397) ^ TargetFrameSize.GetHashCode();
         hash = (hash * 397) ^ (int)TargetFormat;
         hash = (hash * 397) ^ (int)ScaleQuality;
         return hash;
     }
 }
Ejemplo n.º 12
0
        /// <summary>
        /// Builds the CameraInfo message for the ZED Mini RGB camera from its
        /// statically defined calibration matrices and an empty region of interest.
        /// </summary>
        protected override CameraInfoForSIGVerseBridge InitializeCameraInfo(uint imageHeight, uint imageWidth)
        {
            const string distortionModel = "plumb_bob";

            // Full-resolution image: no binning in either direction.
            const uint binningX = 0;
            const uint binningY = 0;

            var roi = new RegionOfInterest(0, 0, 0, 0, false);

            return new CameraInfoForSIGVerseBridge(null, imageHeight, imageWidth, distortionModel, ZEDMiniPubRGB.D, ZEDMiniPubRGB.K, ZEDMiniPubRGB.R, ZEDMiniPubRGB.P, binningX, binningY, roi);
        }
Ejemplo n.º 13
0
        /// <summary>
        /// Tree view selected Item Change: shows the image path, area and mean
        /// intensity of the ROI whose title matches the selected item's header.
        /// </summary>
        /// <param name="sender">sender object</param>
        /// <param name="e">routed event args</param>
        private void RoiProjectTreeViewSelectedItemChanged(object sender, RoutedPropertyChangedEventArgs <object> e)
        {
            // Selection may be empty (or not a TreeViewItem) during tree rebuilds;
            // the original code would throw a NullReferenceException here.
            var selectedItem = this.RoiProjectTreeView.SelectedItem as TreeViewItem;
            if (selectedItem == null)
            {
                return;
            }

            var selectedItemHeader = (string)selectedItem.Header;

            this.currentRoi = this.roiObjects.Find(roi => roi.Title == selectedItemHeader);
            if (this.currentRoi == null)
            {
                // No ROI matches the selected header (e.g. a group node was clicked).
                return;
            }

            this.ImagePathTB.Text = this.currentRoi.ImageFilePath;
            this.AreaTB.Text      = this.currentRoi.Area.ToString(CultureInfo.InvariantCulture);
            this.MeanIntTB.Text   = this.currentRoi.MeanIntensity.ToString(CultureInfo.InvariantCulture);

            this.UpdateLayout();
        }
Ejemplo n.º 14
0
        /// <summary>
        /// Focus the camera on the given rectangle of the preview, defined by the position and size parameters, in UI coordinates within the CaptureElement
        /// </summary>
        /// <param name="position">The position of the tap, to become the center of the focus rectangle</param>
        /// <param name="size">the size of the rectangle around the tap</param>
        /// <returns></returns>
        public async Task TapToFocus(Point position, Size size)
        {
            // Transition to the "focused" state
            _isFocused = true;

            var encodingProperties = _mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
            var previewRect        = GetPreviewStreamRectInControl(encodingProperties, PreviewControl, _displayOrientation);

            // Size the highlight rectangle and center it on the tap position.
            FocusRectangle.Width  = size.Width;
            FocusRectangle.Height = size.Height;

            var left = position.X - FocusRectangle.Width / 2;
            var top  = position.Y - FocusRectangle.Height / 2;

            // Clamp the rectangle inside the active preview area so it never
            // reaches into letterboxing or outside the window bounds.
            left = Math.Min(Math.Max(previewRect.Left, left), previewRect.Width - FocusRectangle.Width + previewRect.Left);
            top  = Math.Min(Math.Max(previewRect.Top, top), previewRect.Height - FocusRectangle.Height + previewRect.Top);

            Canvas.SetLeft(FocusRectangle, left);
            Canvas.SetTop(FocusRectangle, top);

            FocusRectangle.Stroke     = new SolidColorBrush(Colors.White);
            FocusRectangle.Visibility = Visibility.Visible;

            // Convert the UI-space tap into preview-stream coordinates (handles rotation).
            var focusPreview = ConvertUiTapToPreviewRect(position, size, previewRect);

            // This Region Of Interest could also be configured to drive exposure
            // and white balance calculation within the region.
            var regionOfInterest = new RegionOfInterest
            {
                AutoFocusEnabled = true,
                BoundsNormalized = true,
                Bounds           = focusPreview,
                Type             = RegionOfInterestType.Unknown,
                Weight           = 100,
            };

            var result = await FocusCamera(regionOfInterest);

            // Green outline when the camera reports focus, red otherwise.
            FocusRectangle.Stroke = new SolidColorBrush(result == MediaCaptureFocusState.Focused ? Colors.Lime : Colors.Red);
        }
Ejemplo n.º 15
0
        /// <summary>
        /// Computes a dose-volume histogram for the given dose and region of
        /// interest on a background thread, showing a progress indicator while
        /// the computation runs.
        /// </summary>
        /// <param name="dose">Dose distribution to histogram</param>
        /// <param name="roi">Region of interest the histogram is restricted to</param>
        /// <returns>The computed dose-volume histogram</returns>
        public async Task <DoseVolumeHistogram> Build(IDoseObject dose, RegionOfInterest roi)
        {
            var pi = ProgressService.CreateNew("Building DVH... ", false);
            DoseVolumeHistogram dvh = null;
            try
            {
                await Task.Run(() =>
                {
                    dvh = new DoseVolumeHistogram(dose, roi);
                    dvh.Compute();
                });
            }
            finally
            {
                // Always close the progress indicator, even when Compute() throws;
                // the original left it open forever on failure.
                ProgressService.End(pi);
            }

            return dvh;
        }
Ejemplo n.º 16
0
        /// <summary>
        /// Gesture-recognizer delegate: accepts all touches except resize drags
        /// that start outside the region of interest (plus a padding threshold).
        /// </summary>
        public bool ShouldReceiveTouch(UIGestureRecognizer gestureRecognizer, UITouch touch)
        {
            if (gestureRecognizer != resizeRegionOfInterestGestureRecognizer)
            {
                return true;
            }

            // Expand the region by the corner touch threshold before hit-testing.
            var paddedRegionOfInterest = RegionOfInterest.Inset(-regionOfInterestCornerTouchThreshold, -regionOfInterestCornerTouchThreshold);
            var touchLocation          = touch.LocationInView(gestureRecognizer.View);

            return paddedRegionOfInterest.Contains(touchLocation);
        }
Ejemplo n.º 17
0
        ///////////////////////////////////////////////////////////////////////
        // Creates a single-strip SPE file containing a cosine waveform and
        // attaches the given wavelength calibration (and errors) to it.
        ///////////////////////////////////////////////////////////////////////
        private void CreateCalibratedFile(double [] calibration, double [] errors)
        {
            // No calibration supplied, so synthesize a linear one.
            if (calibration == null)
            {
                calibration = new double[720];
                for (int i = 0; i < calibration.Length; i++)
                {
                    calibration[i] = i * 3.0;
                }
            }

            // Cosine waveform (amplitude 100, offset to stay non-negative),
            // sized to match the calibration.
            ushort[] cosine = new ushort[calibration.Length];
            for (int pix = 0; pix < cosine.Length; pix++)
            {
                // Convert the pixel index to an angle, then compute the point.
                double angle = Math.PI * ((double)pix - 360) / 180.0;
                cosine[pix] = (ushort)((double)100 * (Math.Cos(angle) + (double)1));
            }

            // Make a region of interest (single strip).
            RegionOfInterest roi = new RegionOfInterest(0, 0, calibration.Length, 1, 1, 1);

            // Get the file manager
            var filemgr = LightFieldApplication.FileManager;
            if (filemgr == null)
            {
                return;
            }

            RegionOfInterest[] rois = { roi };
            string             root = (string)LightFieldApplication.Experiment.GetValue(ExperimentSettings.FileNameGenerationDirectory);
            IImageDataSet      calSampleData = filemgr.CreateFile(root + "\\CalibrationSample.Spe", rois, 1, ImageDataFormat.MonochromeUnsigned16);

            // Put Data to frame 1
            IImageData data1 = calSampleData.GetFrame(0, 0);
            data1.SetData(cosine);

            // Update The XML for the new document
            SetCalibrationAndError(calSampleData, calibration, errors);

            // Close the file
            filemgr.CloseFile(calSampleData);
        }
Ejemplo n.º 18
0
        ///////////////////////////////////////////////////////////////////////
        // Show 4 Plots of Cos vs Sine in Quad Display
        // 1.) Generates Raw Data
        // 2.) Builds IImageData(s) with the raw data from the DataManager.
        // 3.) Gets 4 Displays and Puts 2 Waveforms in each display.
        ///////////////////////////////////////////////////////////////////////
        private void PlotSinAndCos()
        {
            // Make two curves (720 = 2*PI so its a full cycle)
            ushort[] cosine = new ushort[720];
            ushort[] sine   = new ushort[720];

            // Generate both waveforms (amplitude 100, offset so values stay non-negative).
            for (int pix = 0; pix < 720; pix++)
            {
                // Convert the pixel index to an angle, then compute the points.
                double angle = Math.PI * ((double)pix - 360) / 180.0;
                cosine[pix] = (ushort)((double)100 * (Math.Cos(angle) + (double)1));
                sine[pix]   = (ushort)((double)100 * (Math.Sin(angle) + (double)1));
            }

            // Get the data manager
            var datamgr = LightFieldApplication.DataManager;
            if (datamgr == null)
            {
                return;
            }

            // Single-strip region covering all 720 samples.
            RegionOfInterest roi = new RegionOfInterest(0, 0, 720, 1, 1, 1);

            IImageDataSet cosData  = datamgr.CreateImageDataSet(cosine, roi, ImageDataFormat.MonochromeUnsigned16);
            IImageDataSet sineData = datamgr.CreateImageDataSet(sine, roi, ImageDataFormat.MonochromeUnsigned16);

            // Get The Display Object
            IDisplay display = LightFieldApplication.DisplayManager;
            if (display == null)
            {
                return;
            }

            // Select Data File Compare Mode & 4 Even Windows
            display.ShowDisplay(DisplayLocation.ExperimentWorkspace, DisplayLayout.FourEven);

            // Put both waveforms into each of the 4 windows.
            for (int i = 0; i <= 3; i++)
            {
                IDisplayViewer view = display.GetDisplay(DisplayLocation.ExperimentWorkspace, i);
                view.Display("Cosine", cosData);
                IDisplaySource sinSource = display.Create("Sine", sineData);
                view.Add(sinSource);
            }
        }
Ejemplo n.º 19
0
        /// <summary>
        /// Initializes a dose-volume histogram with <paramref name="nbins"/> bins.
        /// Dose bin values are seeded 0..NBins-1 and the cumulative volume is
        /// initialized to 1 in every bin (placeholders until Compute() runs).
        /// </summary>
        /// <param name="dose">Dose distribution to histogram</param>
        /// <param name="roi">Region of interest the histogram is restricted to</param>
        /// <param name="nbins">Number of histogram bins</param>
        public DoseVolumeHistogram(IDoseObject dose, RegionOfInterest roi, int nbins)
        {
            DoseObject = dose;
            ROIObject  = roi;
            NBins      = nbins;

            Dose = new double[NBins];
            for (int i = 0; i < Dose.Length; i++)
            {
                Dose[i] = i;
            }

            CumulativeVolume = new double[NBins];
            // Fixed: the original loop bounded this on Dose.Length; iterate over
            // the array actually being written so the two cannot drift apart.
            for (int i = 0; i < CumulativeVolume.Length; i++)
            {
                CumulativeVolume[i] = 1;
            }
        }
Ejemplo n.º 20
0
        /// <summary>
        /// Builds a default CameraInfo message.
        /// The calibration values below are example Xtion RGB parameters.
        /// </summary>
        protected virtual CameraInfoForSIGVerseBridge InitializeCameraInfo(uint imageHeight, uint imageWidth)
        {
            string distortionModel = "plumb_bob";

            // Distortion (none), intrinsic, rectification and projection matrices.
            double[] D = { 0.0, 0.0, 0.0, 0.0, 0.0 };
            double[] K = { 554, 0.0, 320, 0.0, 554, 240, 0.0, 0.0, 1.0 };
            double[] R = { 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 };
            double[] P = { 554, 0.0, 320, 0.0, 0.0, 554, 240, 0.0, 0.0, 0.0, 1.0, 0.0 };

            // Full-resolution image: no binning in either direction.
            const uint binningX = 0;
            const uint binningY = 0;

            var roi = new RegionOfInterest(0, 0, 0, 0, false);

            return new CameraInfoForSIGVerseBridge(null, imageHeight, imageWidth, distortionModel, D, K, R, P, binningX, binningY, roi);
        }
Ejemplo n.º 21
0
 /// <summary>
 /// Sets the camera's focus/exposure region of interest. The rectangle is
 /// clamped so it does not extend past the preview bounds. Best-effort:
 /// failures are logged rather than thrown.
 /// (Original summary, translated: "Set focus point".)
 /// </summary>
 /// <param name="left">Left edge of the region, in preview coordinates</param>
 /// <param name="top">Top edge of the region, in preview coordinates</param>
 /// <param name="width">Width of the region</param>
 /// <param name="height">Height of the region</param>
 public static async Task SetROI(double left, double top, double width, double height)
 {
     try
     {
         if (IsSupportROI == false)
         {
             return;
         }

         // Clamp the rectangle so it stays inside the preview area.
         if (left < 0)
         {
             left = 0;
         }
         else if (left + width > CurrentPreviewW)
         {
             left = CurrentPreviewW - width;
         }
         if (top < 0)
         {
             top = 0;
         }
         else if (top + height > CurrentPreviewH)
         {
             top = CurrentPreviewH - height;
         }

         var roi = new RegionOfInterest();
         roi.AutoFocusEnabled        = IsSetFocusAF;
         roi.AutoExposureEnabled     = IsSetFocusAE;
         roi.AutoWhiteBalanceEnabled = false;
         roi.Bounds = new Rect(left, top, width, height);
         try
         {
             await MainCamera.VideoDeviceController.RegionsOfInterestControl.SetRegionsAsync(new List <RegionOfInterest>() { roi });
         }
         catch (Exception ex)
         {
             // Some devices reject ROI updates; keep best-effort semantics but
             // log instead of silently swallowing (the original catch was empty).
             System.Diagnostics.Debug.WriteLine("SetROI SetRegionsAsync failed: {0}", ex);
         }
     }
     catch (Exception ex)
     {
         System.Diagnostics.Debug.WriteLine("SetROI failed: {0}", ex);
     }
 }
Ejemplo n.º 22
0
        /// <summary>
        /// Loads the regions of interest of a DICOM RT Structure Set into
        /// <paramref name="structureSet"/>. Only the first file in
        /// <paramref name="files"/> is read.
        /// </summary>
        /// <param name="files">DICOM files; files[0] must contain the structure set.</param>
        /// <param name="structureSet">Destination; its Name and ROIs are populated.</param>
        public void Load(DicomFile[] files, StructureSet structureSet)
        {
            DicomFile file = files[0];

            structureSet.Name = file.Dataset.Get <string>(DicomTag.StructureSetLabel, "");

            // Map ROI number -> ROI name from the StructureSetROISequence.
            Dictionary <int, string> roi_names = new Dictionary <int, string>();
            DicomSequence            structs   = file.Dataset.Get <DicomSequence>(DicomTag.StructureSetROISequence);

            foreach (DicomDataset item in structs)
            {
                roi_names.Add(item.Get <int>(DicomTag.ROINumber), item.Get <string>(DicomTag.ROIName));
            }

            List <RegionOfInterest> rois = new List <RegionOfInterest>();

            DicomSequence s = file.Dataset.Get <DicomSequence>(DicomTag.ROIContourSequence);

            foreach (DicomDataset item in s.Items)
            {
                RegionOfInterest roi   = new RegionOfInterest();
                int[]            color = item.Get <int[]>(DicomTag.ROIDisplayColor);
                roi.Color     = DicomColor.FromRgb(color[0], color[1], color[2]);
                roi.ROINumber = item.Get <int>(DicomTag.ReferencedROINumber);

                // Single dictionary lookup instead of ContainsKey + indexer.
                if (roi_names.TryGetValue(roi.ROINumber, out string roiName))
                {
                    roi.Name = roiName;
                }

                DicomSequence roi_definitions;
                try
                {
                    roi_definitions = item.Get <DicomSequence>(DicomTag.ContourSequence);
                }
                catch (Exception)
                {
                    // ROI without contour data (e.g. an empty structure) - skip it.
                    continue;
                }

                double xmin = double.MaxValue, ymin = double.MaxValue, zmin = double.MaxValue, xmax = double.MinValue, ymax = double.MinValue, zmax = double.MinValue;

                foreach (DicomDataset contourSlice in roi_definitions.Items)
                {
                    // NumberOfContourPoints is read but not used below; the vertex
                    // count is derived from the length of ContourData instead.
                    int           vertex_count = contourSlice.Get <int>(DicomTag.NumberOfContourPoints);
                    double[]      vertices     = contourSlice.Get <double[]>(DicomTag.ContourData);
                    PlanarPolygon poly         = new PlanarPolygon();
                    // we divide the number of vertices here by 1.5 because we are going from a 3d poly to a 2d poly on the z plane
                    poly.Vertices = new double[(int)(vertices.Length / 1.5)];
                    // All points of one contour item lie on the same axial plane,
                    // so the z coordinate of the first point is the slice position.
                    double zcoord    = vertices[2];
                    int    polyIndex = 0;

                    RegionOfInterestSlice slice = roi.GetSlice(zcoord);
                    if (slice == null)
                    {
                        slice = new RegionOfInterestSlice { ZCoord = zcoord };
                    }

                    // Copy (x, y) pairs and track the axis-aligned bounding box.
                    for (int i = 0; i < vertices.Length; i += 3)
                    {
                        poly.Vertices[polyIndex]     = vertices[i];
                        poly.Vertices[polyIndex + 1] = vertices[i + 1];
                        if (vertices[i] < xmin)
                        {
                            xmin = vertices[i];
                        }
                        if (vertices[i] > xmax)
                        {
                            xmax = vertices[i];
                        }
                        if (vertices[i + 1] < ymin)
                        {
                            ymin = vertices[i + 1];
                        }
                        if (vertices[i + 1] > ymax)
                        {
                            ymax = vertices[i + 1];
                        }
                        if (zcoord < zmin)
                        {
                            zmin = zcoord;
                        }
                        if (zcoord > zmax)
                        {
                            zmax = zcoord;
                        }
                        polyIndex += 2;
                    }

                    // NOTE(review): these updates are overwritten by the unconditional
                    // ZRange assignment after this loop; kept in case GetSlice/AddSlice
                    // read ZRange while contours are still being added - confirm.
                    if (zmin < roi.ZRange.Minimum)
                    {
                        roi.ZRange.Minimum = zmin;
                    }
                    if (zmax > roi.ZRange.Maximum)
                    {
                        roi.ZRange.Maximum = zmax;
                    }

                    slice.AddPolygon(poly);

                    roi.AddSlice(slice, zcoord);
                }

                roi.XRange = new Geometry.Range(xmin, xmax);
                roi.YRange = new Geometry.Range(ymin, ymax);
                roi.ZRange = new Geometry.Range(zmin, zmax);

                // Propagate the ROI bounding box to every polygon, then precompute
                // each slice's binary mask.
                for (int i = 0; i < roi.RegionOfInterestSlices.Count; i++)
                {
                    for (int j = 0; j < roi.RegionOfInterestSlices[i].Polygons.Count; j++)
                    {
                        roi.RegionOfInterestSlices[i].Polygons[j].XRange = new Geometry.Range(xmin, xmax);
                        roi.RegionOfInterestSlices[i].Polygons[j].YRange = new Geometry.Range(ymin, ymax);
                    }
                    roi.RegionOfInterestSlices[i].ComputeBinaryMask();
                }
                rois.Add(roi);
            }
            // NOTE(review): explicit GC.Collect() is normally an anti-pattern;
            // presumably intended to release the large contour buffers - confirm.
            GC.Collect();
            structureSet.ROIs = rois;
        }
    }
Ejemplo n.º 23
0
 /// <summary>
 /// Member-wise equality against another <see cref="TransformParameters"/>:
 /// region of interest, target frame size, target format and scale quality
 /// must all match.
 /// </summary>
 protected bool Equals(TransformParameters other)
 {
     // Guard clauses preserve the original short-circuit evaluation order.
     if (!RegionOfInterest.Equals(other.RegionOfInterest))
     {
         return false;
     }
     if (!TargetFrameSize.Equals(other.TargetFrameSize))
     {
         return false;
     }
     return TargetFormat == other.TargetFormat &&
            ScaleQuality == other.ScaleQuality;
 }
Ejemplo n.º 24
0
        /// <summary>
        /// Unity start hook: opens two SIGVerse rosbridge TCP connections and
        /// pre-builds the CameraInfo and Image messages for the RGB camera topics.
        /// </summary>
        void Start()
        {
            // Fall back to the shared ConfigManager values when no IP/port was
            // configured on this component.
            if (this.rosBridgeIP.Equals(string.Empty))
            {
                this.rosBridgeIP = ConfigManager.Instance.configInfo.rosbridgeIP;
            }
            if (this.sigverseBridgePort == 0)
            {
                this.sigverseBridgePort = ConfigManager.Instance.configInfo.sigverseBridgePort;
            }


            // Separate connections for the CameraInfo stream and the Image stream.
            this.tcpClientCameraInfo = HSRCommon.GetSIGVerseRosbridgeConnection(this.rosBridgeIP, this.sigverseBridgePort);
            this.tcpClientImage      = HSRCommon.GetSIGVerseRosbridgeConnection(this.rosBridgeIP, this.sigverseBridgePort);

            // Generous 100 s timeouts on both streams.
            this.networkStreamCameraInfo              = this.tcpClientCameraInfo.GetStream();
            this.networkStreamCameraInfo.ReadTimeout  = 100000;
            this.networkStreamCameraInfo.WriteTimeout = 100000;

            this.networkStreamImage              = this.tcpClientImage.GetStream();
            this.networkStreamImage.ReadTimeout  = 100000;
            this.networkStreamImage.WriteTimeout = 100000;

            // RGB Camera
            this.xtionRGBCamera = this.rgbCamera.GetComponentInChildren <Camera>();

            // Image dimensions come from the camera's render target.
            int imageWidth  = this.xtionRGBCamera.targetTexture.width;
            int imageHeight = this.xtionRGBCamera.targetTexture.height;

            this.imageTexture = new Texture2D(imageWidth, imageHeight, TextureFormat.RGB24, false);


            //  [camera/rgb/CameraInfo]
            // Pinhole intrinsics with zero distortion for the simulated camera.
            string distortionModel = "plumb_bob";

            double[] D = { 0.0, 0.0, 0.0, 0.0, 0.0 };
            double[] K = { 554, 0.0, 320, 0.0, 554, 240, 0.0, 0.0, 1.0 };
            double[] R = { 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 };
            double[] P = { 554, 0.0, 320, 0.0, 0.0, 554, 240, 0.0, 0.0, 0.0, 1.0, 0.0 };

            //double[] D = { 0.0, 0.0, 0.0, 0.0, 0.0 };
            //double[] K = { 554.3827128226441, 0.0, 320.5, 0.0, 554.3827128226441, 240.5, 0.0, 0.0, 1.0 };
            //double[] R = { 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 };
            //double[] P = { 554.3827128226441, 0.0, 320.5, 0.0, 0.0, 554.3827128226441, 240.5, 0.0, 0.0, 0.0, 1.0, 0.0 };

            // Zero-valued ROI placeholder for the CameraInfo message.
            RegionOfInterest roi = new RegionOfInterest(0, 0, 0, 0, false);

            this.cameraInfoData = new CameraInfoForSIGVerseBridge(null, (uint)imageHeight, (uint)imageWidth, distortionModel, D, K, R, P, 0, 0, roi);

            //  [camera/rgb/Image_raw]
            string encoding    = "rgb8";
            byte   isBigendian = 0;
            uint   step        = (uint)imageWidth * 3;             // bytes per row: 3 channels * width

            this.imageData = new ImageForSIGVerseBridge(null, (uint)imageHeight, (uint)imageWidth, encoding, isBigendian, step, null);

            this.header = new Header(0, new SIGVerse.ROSBridge.msg_helpers.Time(0, 0), this.rgbCamera.name);


            // Reusable "publish" message envelopes for both topics.
            this.cameraInfoMsg = new SIGVerseROSBridgeMessage <CameraInfoForSIGVerseBridge>("publish", this.topicNameCameraInfo, CameraInfoForSIGVerseBridge.GetMessageType(), this.cameraInfoData);
            this.imageMsg      = new SIGVerseROSBridgeMessage <ImageForSIGVerseBridge>     ("publish", this.topicNameImage, ImageForSIGVerseBridge.GetMessageType(), this.imageData);
        }
Ejemplo n.º 25
0
 /// <summary>
 /// Member-wise equality against another <see cref="PostVideoDecodingParameters"/>:
 /// region of interest, target frame size, target format and scale quality
 /// must all match.
 /// </summary>
 protected bool Equals(PostVideoDecodingParameters other)
 {
     // Guard clauses preserve the original short-circuit evaluation order.
     if (!RegionOfInterest.Equals(other.RegionOfInterest))
     {
         return false;
     }
     if (!TargetFrameSize.Equals(other.TargetFrameSize))
     {
         return false;
     }
     return TargetFormat == other.TargetFormat &&
            ScaleQuality == other.ScaleQuality;
 }
        /// <summary>
        /// Opens (or reuses) the SIGVerse rosbridge connections for the given topics
        /// and pre-builds the CameraInfo/Image messages for the RGB camera.
        /// </summary>
        /// <param name="rosBridgeIP">IP address of the rosbridge server.</param>
        /// <param name="sigverseBridgePort">Port of the SIGVerse bridge.</param>
        /// <param name="topicNameCameraInfo">Topic name for CameraInfo messages.</param>
        /// <param name="topicNameImage">Topic name for Image messages.</param>
        /// <param name="isUsingThread">Whether publishing should run on a worker thread.</param>
        public void Initialize(string rosBridgeIP, int sigverseBridgePort, string topicNameCameraInfo, string topicNameImage, bool isUsingThread)
        {
            // One TCP connection per topic, shared process-wide through the
            // connection map so repeated Initialize calls reuse sockets.
            if (!RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap.ContainsKey(topicNameCameraInfo))
            {
                this.tcpClientCameraInfo = SIGVerseRosBridgeConnection.GetConnection(rosBridgeIP, sigverseBridgePort);

                RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap.Add(topicNameCameraInfo, this.tcpClientCameraInfo);
            }
            else
            {
                this.tcpClientCameraInfo = RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap[topicNameCameraInfo];
            }

            if (!RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap.ContainsKey(topicNameImage))
            {
                this.tcpClientImage = SIGVerseRosBridgeConnection.GetConnection(rosBridgeIP, sigverseBridgePort);

                RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap.Add(topicNameImage, this.tcpClientImage);
            }
            else
            {
                this.tcpClientImage = RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap[topicNameImage];
            }

            // Generous 100 s timeouts on both streams.
            this.networkStreamCameraInfo              = this.tcpClientCameraInfo.GetStream();
            this.networkStreamCameraInfo.ReadTimeout  = 100000;
            this.networkStreamCameraInfo.WriteTimeout = 100000;

            this.networkStreamImage              = this.tcpClientImage.GetStream();
            this.networkStreamImage.ReadTimeout  = 100000;
            this.networkStreamImage.WriteTimeout = 100000;

            // RGB Camera
            this.rgbCamera = this.cameraFrameObj.GetComponentInChildren <Camera>();

            // Image dimensions come from the camera's render target.
            int imageWidth  = this.rgbCamera.targetTexture.width;
            int imageHeight = this.rgbCamera.targetTexture.height;

            this.imageTexture = new Texture2D(imageWidth, imageHeight, TextureFormat.RGB24, false);


            //  [camera/rgb/CameraInfo]
            // Pinhole intrinsics with zero distortion for the simulated camera.
            string distortionModel = "plumb_bob";

            double[] D = { 0.0, 0.0, 0.0, 0.0, 0.0 };
            double[] K = { 639, 0.0, 320, 0.0, 639, 240, 0.0, 0.0, 1.0 };             // memo: 639 = 465(depth) * tan(55/2) / tan(41.5/2)
            double[] R = { 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 };
            double[] P = { 639, 0.0, 320, 0.0, 0.0, 639, 240, 0.0, 0.0, 0.0, 1.0, 0.0 };

//			double[] D = { 0.0, 0.0, 0.0, 0.0, 0.0 };
//			double[] K = { 615.8720092773438, 0.0, 314.9148254394531, 0.0, 615.8720703125, 238.09365844726562, 0.0, 0.0, 1.0 };
//			double[] R = { 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 };
//			double[] P = { 615.8720092773438, 0.0, 314.9148254394531, 0.0, 0.0, 615.8720703125, 238.09365844726562, 0.0, 0.0, 0.0, 1.0, 0.0 };

            // Zero-valued ROI placeholder for the CameraInfo message.
            RegionOfInterest roi = new RegionOfInterest(0, 0, 0, 0, false);

            this.cameraInfoData = new CameraInfoForSIGVerseBridge(null, (uint)imageHeight, (uint)imageWidth, distortionModel, D, K, R, P, 0, 0, roi);

            //  [camera/rgb/Image_raw]
            string encoding    = "rgb8";
            byte   isBigendian = 0;
            uint   step        = (uint)imageWidth * 3;             // bytes per row: 3 channels * width

            this.imageData = new ImageForSIGVerseBridge(null, (uint)imageHeight, (uint)imageWidth, encoding, isBigendian, step, null);

            this.header = new Header(0, new SIGVerse.RosBridge.msg_helpers.Time(0, 0), this.cameraFrameObj.name);


            // Reusable "publish" message envelopes for both topics.
            this.cameraInfoMsg = new SIGVerseRosBridgeMessage <CameraInfoForSIGVerseBridge>("publish", topicNameCameraInfo, CameraInfoForSIGVerseBridge.GetMessageType(), this.cameraInfoData);
            this.imageMsg      = new SIGVerseRosBridgeMessage <ImageForSIGVerseBridge>     ("publish", topicNameImage, ImageForSIGVerseBridge.GetMessageType(), this.imageData);

            this.isUsingThread = isUsingThread;
        }
        /// <summary>
        /// Opens (or reuses) the SIGVerse rosbridge connections for the given topics
        /// and pre-builds the CameraInfo/Image messages for the RGB camera.
        /// </summary>
        /// <param name="rosBridgeIP">IP address of the rosbridge server.</param>
        /// <param name="sigverseBridgePort">Port of the SIGVerse bridge.</param>
        /// <param name="topicNameCameraInfo">Topic name for CameraInfo messages.</param>
        /// <param name="topicNameImage">Topic name for Image messages.</param>
        /// <param name="isUsingThread">Whether publishing should run on a worker thread.</param>
        public void Initialize(string rosBridgeIP, int sigverseBridgePort, string topicNameCameraInfo, string topicNameImage, bool isUsingThread)
        {
            // One TCP connection per topic, shared process-wide through the
            // connection map so repeated Initialize calls reuse sockets.
            if (!RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap.ContainsKey(topicNameCameraInfo))
            {
                this.tcpClientCameraInfo = SIGVerseRosBridgeConnection.GetConnection(rosBridgeIP, sigverseBridgePort);

                RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap.Add(topicNameCameraInfo, this.tcpClientCameraInfo);
            }
            else
            {
                this.tcpClientCameraInfo = RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap[topicNameCameraInfo];
            }

            if (!RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap.ContainsKey(topicNameImage))
            {
                this.tcpClientImage = SIGVerseRosBridgeConnection.GetConnection(rosBridgeIP, sigverseBridgePort);

                RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap.Add(topicNameImage, this.tcpClientImage);
            }
            else
            {
                this.tcpClientImage = RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap[topicNameImage];
            }

            // Generous 100 s timeouts on both streams.
            this.networkStreamCameraInfo              = this.tcpClientCameraInfo.GetStream();
            this.networkStreamCameraInfo.ReadTimeout  = 100000;
            this.networkStreamCameraInfo.WriteTimeout = 100000;

            this.networkStreamImage              = this.tcpClientImage.GetStream();
            this.networkStreamImage.ReadTimeout  = 100000;
            this.networkStreamImage.WriteTimeout = 100000;

            // RGB Camera
            this.rgbCamera = this.cameraFrameObj.GetComponentInChildren <Camera>();

            // Image dimensions come from the camera's render target.
            int imageWidth  = this.rgbCamera.targetTexture.width;
            int imageHeight = this.rgbCamera.targetTexture.height;

            this.imageTexture = new Texture2D(imageWidth, imageHeight, TextureFormat.RGB24, false);


            //  [camera/rgb/CameraInfo]
            // Pinhole intrinsics with zero distortion for the simulated camera.
            string distortionModel = "plumb_bob";

            double[] D = { 0.0, 0.0, 0.0, 0.0, 0.0 };
            double[] K = { 639, 0.0, 320, 0.0, 639, 240, 0.0, 0.0, 1.0 };             // memo: 639 = 445(depth) * tan(46/2) / tan(43/2) * 480/360
            double[] R = { 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 };
            double[] P = { 639, 0.0, 320, 0.0, 0.0, 639, 240, 0.0, 0.0, 0.0, 1.0, 0.0 };

//			double[] D = { -0.09601674973964691, 0.09204437583684921, -0.0017978776013478637, 0.0009506311034783721, 0.0 };
//			double[] K = { 618.6206665039062, 0.0, 310.1082458496094, 0.0, 624.5146484375, 230.0232696533203, 0.0, 0.0, 1.0 };
//			double[] R = { 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 };
//			double[] P = { 618.6206665039062, 0.0, 310.1082458496094, 0.0, 0.0, 624.5146484375, 230.0232696533203, 0.0, 0.0, 0.0, 1.0, 0.0 };

            // Zero-valued ROI placeholder for the CameraInfo message.
            RegionOfInterest roi = new RegionOfInterest(0, 0, 0, 0, false);

            this.cameraInfoData = new CameraInfoForSIGVerseBridge(null, (uint)imageHeight, (uint)imageWidth, distortionModel, D, K, R, P, 0, 0, roi);

            //  [camera/rgb/Image_raw]
            string encoding    = "rgb8";
            byte   isBigendian = 0;
            uint   step        = (uint)imageWidth * 3;             // bytes per row: 3 channels * width

            this.imageData = new ImageForSIGVerseBridge(null, (uint)imageHeight, (uint)imageWidth, encoding, isBigendian, step, null);

            this.header = new Header(0, new SIGVerse.RosBridge.msg_helpers.Time(0, 0), this.cameraFrameObj.name);


            // Reusable "publish" message envelopes for both topics.
            this.cameraInfoMsg = new SIGVerseRosBridgeMessage <CameraInfoForSIGVerseBridge>("publish", topicNameCameraInfo, CameraInfoForSIGVerseBridge.GetMessageType(), this.cameraInfoData);
            this.imageMsg      = new SIGVerseRosBridgeMessage <ImageForSIGVerseBridge>     ("publish", topicNameImage, ImageForSIGVerseBridge.GetMessageType(), this.imageData);

            this.isUsingThread = isUsingThread;
        }
Ejemplo n.º 28
0
        /// <summary>
        /// Opens (or reuses) the SIGVerse rosbridge connections for the given topics
        /// and pre-builds the CameraInfo/Image messages for one eye of a stereo
        /// RGB camera pair.
        /// </summary>
        /// <param name="rosBridgeIP">IP address of the rosbridge server.</param>
        /// <param name="sigverseBridgePort">Port of the SIGVerse bridge.</param>
        /// <param name="topicNameCameraInfo">Topic name for CameraInfo messages.</param>
        /// <param name="topicNameImage">Topic name for Image messages.</param>
        /// <param name="isRight">True for the right camera (applies the stereo baseline offset to P).</param>
        /// <param name="isUsingThread">Whether publishing should run on a worker thread.</param>
        public void Initialize(string rosBridgeIP, int sigverseBridgePort, string topicNameCameraInfo, string topicNameImage, bool isRight, bool isUsingThread)
        {
            // One TCP connection per topic, shared process-wide through the
            // connection map so repeated Initialize calls reuse sockets.
            if (!RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap.ContainsKey(topicNameCameraInfo))
            {
                this.tcpClientCameraInfo = SIGVerseRosBridgeConnection.GetConnection(rosBridgeIP, sigverseBridgePort);

                RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap.Add(topicNameCameraInfo, this.tcpClientCameraInfo);
            }
            else
            {
                this.tcpClientCameraInfo = RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap[topicNameCameraInfo];
            }

            if (!RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap.ContainsKey(topicNameImage))
            {
                this.tcpClientImage = SIGVerseRosBridgeConnection.GetConnection(rosBridgeIP, sigverseBridgePort);

                RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap.Add(topicNameImage, this.tcpClientImage);
            }
            else
            {
                this.tcpClientImage = RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap[topicNameImage];
            }

            // Generous 100 s timeouts on both streams.
            this.networkStreamCameraInfo              = this.tcpClientCameraInfo.GetStream();
            this.networkStreamCameraInfo.ReadTimeout  = 100000;
            this.networkStreamCameraInfo.WriteTimeout = 100000;

            this.networkStreamImage              = this.tcpClientImage.GetStream();
            this.networkStreamImage.ReadTimeout  = 100000;
            this.networkStreamImage.WriteTimeout = 100000;


            // RGB Camera
            this.rgbCamera = this.cameraFrameObj.GetComponentInChildren <Camera>();

            // Image dimensions come from the camera's render target.
            int imageWidth  = this.rgbCamera.targetTexture.width;
            int imageHeight = this.rgbCamera.targetTexture.height;

            this.imageTexture = new Texture2D(imageWidth, imageHeight, TextureFormat.RGB24, false);


            //  [camera/rgb/CameraInfo]
            // Pinhole intrinsics with zero distortion for the simulated camera.
            string distortionModel = "plumb_bob";

            double[] D = { 0.0, 0.0, 0.0, 0.0, 0.0 };
            double[] K = { 968.765, 0.0, 640, 0.0, 968.77, 480, 0.0, 0.0, 1.0 };
            double[] R = { 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 };
            double[] P = { 968.765, 0.0, 640, 0.0, 0.0, 968.77, 480, 0.0, 0.0, 0.0, 1.0, 0.0 };

            // The right camera's projection matrix carries the stereo baseline
            // as Tx = -fx * baseline.
            if (isRight)
            {
                P[3] = -135.627;                  // -135.627 = - 968.765 * 0.14(baseline=distance between both eyes)
            }

            //double[] D = { 0.0, 0.0, 0.0, 0.0, 0.0 };
            //double[] K = { 968.7653251755174, 0.0, 640.5, 0.0, 968.7653251755174, 480.5, 0.0, 0.0, 1.0 };
            //double[] R = { 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 };
            // Left Camera
            //double[] P = { 968.7653251755174, 0.0, 640.5, -0.0, 0.0, 968.7653251755174, 480.5, 0.0, 0.0, 0.0, 1.0, 0.0 };
            // Right Camera
            //double[] P = { 968.7653251755174, 0.0, 640.5, -135.62714552457246, 0.0, 968.7653251755174, 480.5, 0.0, 0.0, 0.0, 1.0, 0.0 };

            // Zero-valued ROI placeholder for the CameraInfo message.
            RegionOfInterest roi = new RegionOfInterest(0, 0, 0, 0, false);

            this.cameraInfoData = new CameraInfoForSIGVerseBridge(null, (uint)imageHeight, (uint)imageWidth, distortionModel, D, K, R, P, 0, 0, roi);

            //  [camera/rgb/Image_raw]
            string encoding    = "rgb8";
            byte   isBigendian = 0;
            uint   step        = (uint)imageWidth * 3;             // bytes per row: 3 channels * width

            this.imageData = new ImageForSIGVerseBridge(null, (uint)imageHeight, (uint)imageWidth, encoding, isBigendian, step, null);

            this.header = new Header(0, new SIGVerse.RosBridge.msg_helpers.Time(0, 0), this.cameraFrameObj.name);

            // Reusable "publish" message envelopes for both topics.
            this.cameraInfoMsg = new SIGVerseRosBridgeMessage <CameraInfoForSIGVerseBridge>("publish", topicNameCameraInfo, CameraInfoForSIGVerseBridge.GetMessageType(), this.cameraInfoData);
            this.imageMsg      = new SIGVerseRosBridgeMessage <ImageForSIGVerseBridge>     ("publish", topicNameImage, ImageForSIGVerseBridge.GetMessageType(), this.imageData);

            this.isUsingThread = isUsingThread;
        }
        /// <summary>
        /// Runs auto focus, optionally constrained to a small region around the
        /// given tap point in the capture element's UI coordinates.
        /// </summary>
        /// <param name="x">Tap x coordinate in UI space.</param>
        /// <param name="y">Tap y coordinate in UI space.</param>
        /// <param name="useCoordinates">True to focus around (x, y); false to clear any regions and reset focus.</param>
        public async Task AutoFocusAsync(int x, int y, bool useCoordinates)
        {
            if (IsFocusSupported)
            {
                var focusControl = mediaCapture.VideoDeviceController.FocusControl;
                var roiControl = mediaCapture.VideoDeviceController.RegionsOfInterestControl;
                try
                {
                    // Region-based focus only works when the driver supports
                    // auto-focus regions and exposes at least one region slot.
                    if (roiControl.AutoFocusSupported && roiControl.MaxRegions > 0)
                    {
                        if (useCoordinates)
                        {
                            var props = mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview);

                            // Map the UI tap into a normalized 20x20 rectangle in
                            // preview-stream coordinates.
                            var previewEncodingProperties = GetPreviewResolution(props);
                            var previewRect = GetPreviewStreamRectInControl(previewEncodingProperties, captureElement);
                            var focusPreview = ConvertUiTapToPreviewRect(new Point(x, y), new Size(20, 20), previewRect);
                            var regionOfInterest = new RegionOfInterest
                            {
                                AutoFocusEnabled = true,
                                BoundsNormalized = true,
                                Bounds = focusPreview,
                                Type = RegionOfInterestType.Unknown,
                                Weight = 100
                            };
                            await roiControl.SetRegionsAsync(new[] { regionOfInterest }, true);

                            // Prefer full-range, single-shot focus when the
                            // hardware supports it; otherwise take whatever the
                            // device offers first.
                            var focusRange = focusControl.SupportedFocusRanges.Contains(AutoFocusRange.FullRange)
                                ? AutoFocusRange.FullRange
                                : focusControl.SupportedFocusRanges.FirstOrDefault();

                            var focusMode = focusControl.SupportedFocusModes.Contains(FocusMode.Single)
                                ? FocusMode.Single
                                : focusControl.SupportedFocusModes.FirstOrDefault();

                            var settings = new FocusSettings
                            {
                                Mode = focusMode,
                                AutoFocusRange = focusRange,
                            };

                            focusControl.Configure(settings);
                        }
                        else
                        {
                            // If no region provided, clear any regions and reset focus
                            await roiControl.ClearRegionsAsync();
                        }
                    }

                    await focusControl.FocusAsync();
                }
                catch (Exception ex)
                {
                    System.Diagnostics.Debug.WriteLine("AutoFocusAsync Error: {0}", ex);
                }
            }
        }
Ejemplo n.º 30
0
        /// <summary>
        /// Pan-gesture handler that moves or resizes the region of interest.
        /// A drag that begins near a corner resizes from that corner; any other
        /// drag translates the whole region. Fires RegionOfInterestChanged when
        /// the gesture ends.
        /// </summary>
        private void ResizeRegionOfInterestWithGestureRecognizer(UIPanGestureRecognizer gestureRecognizer)
        {
            var touchLocation       = gestureRecognizer.LocationInView(gestureRecognizer.View);
            var oldRegionOfInterest = this.RegionOfInterest;

            switch (gestureRecognizer.State)
            {
            case UIGestureRecognizerState.Began:
                // When the gesture begins, save the corner that is closest to
                // the resize region of interest gesture recognizer's touch location.
                this.currentControlCorner = CornerOfRect(oldRegionOfInterest, touchLocation);
                break;


            case UIGestureRecognizerState.Changed:
                var newRegionOfInterest = oldRegionOfInterest;

                switch (this.currentControlCorner)
                {
                case ControlCorner.None:
                    // Update the new region of interest with the gesture recognizer's translation.
                    var translation = gestureRecognizer.TranslationInView(gestureRecognizer.View);

                    // Move the region of interest with the gesture recognizer's translation.
                    if (this.RegionOfInterest.Contains(touchLocation))
                    {
                        newRegionOfInterest.X += translation.X;
                        newRegionOfInterest.Y += translation.Y;
                    }

                    // If the touch location goes outside the preview layer,
                    // we will only translate the region of interest in the
                    // plane that is not out of bounds.
                    var normalizedRect = new CGRect(0, 0, 1, 1);
                    if (!normalizedRect.Contains(this.VideoPreviewLayer.PointForCaptureDevicePointOfInterest(touchLocation)))
                    {
                        if (touchLocation.X < RegionOfInterest.GetMinX() || touchLocation.X > RegionOfInterest.GetMaxX())
                        {
                            newRegionOfInterest.Y += translation.Y;
                        }
                        else if (touchLocation.Y < RegionOfInterest.GetMinY() || touchLocation.Y > RegionOfInterest.GetMaxY())
                        {
                            newRegionOfInterest.X += translation.X;
                        }
                    }

                    // Set the translation to be zero so that the new gesture
                    // recognizer's translation is in respect to the region of
                    // interest's new position.
                    gestureRecognizer.SetTranslation(CGPoint.Empty, gestureRecognizer.View);
                    break;

                case ControlCorner.TopLeft:
                    // Anchor the bottom-right corner; the dragged corner follows the touch.
                    newRegionOfInterest = new CGRect(touchLocation.X, touchLocation.Y,
                                                     oldRegionOfInterest.Width + oldRegionOfInterest.X - touchLocation.X,
                                                     oldRegionOfInterest.Height + oldRegionOfInterest.Y - touchLocation.Y);
                    break;

                case ControlCorner.TopRight:
                    // Anchor the bottom-left corner.
                    newRegionOfInterest = new CGRect(newRegionOfInterest.X,
                                                     touchLocation.Y,
                                                     touchLocation.X - newRegionOfInterest.X,
                                                     oldRegionOfInterest.Height + newRegionOfInterest.Y - touchLocation.Y);
                    break;


                case ControlCorner.BottomLeft:
                    // Anchor the top-right corner.
                    newRegionOfInterest = new CGRect(touchLocation.X,
                                                     oldRegionOfInterest.Y,
                                                     oldRegionOfInterest.Width + oldRegionOfInterest.X - touchLocation.X,
                                                     touchLocation.Y - oldRegionOfInterest.Y);
                    break;

                case ControlCorner.BottomRight:
                    // Anchor the top-left corner.
                    newRegionOfInterest = new CGRect(oldRegionOfInterest.X,
                                                     oldRegionOfInterest.Y,
                                                     touchLocation.X - oldRegionOfInterest.X,
                                                     touchLocation.Y - oldRegionOfInterest.Y);
                    break;
                }

                // Update the region of interest with a valid CGRect.
                this.SetRegionOfInterestWithProposedRegionOfInterest(newRegionOfInterest);
                break;

            case UIGestureRecognizerState.Ended:
                this.RegionOfInterestChanged?.Invoke(this, EventArgs.Empty);

                // Reset the current corner reference to none now that the resize.
                // gesture recognizer has ended.
                this.currentControlCorner = ControlCorner.None;
                break;

            default:
                return;
            }
        }
        /// <summary>
        /// Focuses the camera on <paramref name="region"/>, or resets focus to the
        /// default region when <paramref name="region"/> is null.
        /// </summary>
        /// <param name="region">The region to focus on, or null to focus on the default region</param>
        /// <returns>The focus state reported by the focus control after focusing.</returns>
        private async Task<MediaCaptureFocusState> FocusCamera(RegionOfInterest region)
        {
            var roiControl = _mediaCapture.VideoDeviceController.RegionsOfInterestControl;
            var focusControl = _mediaCapture.VideoDeviceController.FocusControl;

            if (region == null)
            {
                // If no region provided, clear any regions and reset focus
                await roiControl.ClearRegionsAsync();
            }
            else
            {
                // Apply the caller's region before configuring focus.
                await roiControl.SetRegionsAsync(new[] { region }, true);

                // Prefer full-range, single-shot focus when supported; otherwise
                // take whatever the device offers first.
                AutoFocusRange focusRange;
                if (focusControl.SupportedFocusRanges.Contains(AutoFocusRange.FullRange))
                {
                    focusRange = AutoFocusRange.FullRange;
                }
                else
                {
                    focusRange = focusControl.SupportedFocusRanges.FirstOrDefault();
                }

                FocusMode focusMode;
                if (focusControl.SupportedFocusModes.Contains(FocusMode.Single))
                {
                    focusMode = FocusMode.Single;
                }
                else
                {
                    focusMode = focusControl.SupportedFocusModes.FirstOrDefault();
                }

                focusControl.Configure(new FocusSettings { Mode = focusMode, AutoFocusRange = focusRange });
            }

            await focusControl.FocusAsync();

            return focusControl.FocusState;
        }
        /// <summary>
        /// Opens (or reuses) the SIGVerse rosbridge connections for the given topics
        /// and pre-builds the CameraInfo/Image messages for the depth camera.
        /// </summary>
        /// <param name="rosBridgeIP">IP address of the rosbridge server.</param>
        /// <param name="sigverseBridgePort">Port of the SIGVerse bridge.</param>
        /// <param name="topicNameCameraInfo">Topic name for CameraInfo messages.</param>
        /// <param name="topicNameImage">Topic name for Image messages.</param>
        /// <param name="isUsingThread">Whether publishing should run on a worker thread.</param>
        public void Initialize(string rosBridgeIP, int sigverseBridgePort, string topicNameCameraInfo, string topicNameImage, bool isUsingThread)
        {
            // One TCP connection per topic, shared process-wide through the
            // connection map so repeated Initialize calls reuse sockets.
            if (!RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap.ContainsKey(topicNameCameraInfo))
            {
                this.tcpClientCameraInfo = SIGVerseRosBridgeConnection.GetConnection(rosBridgeIP, sigverseBridgePort);

                RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap.Add(topicNameCameraInfo, this.tcpClientCameraInfo);
            }
            else
            {
                this.tcpClientCameraInfo = RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap[topicNameCameraInfo];
            }

            if (!RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap.ContainsKey(topicNameImage))
            {
                this.tcpClientImage = SIGVerseRosBridgeConnection.GetConnection(rosBridgeIP, sigverseBridgePort);

                RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap.Add(topicNameImage, this.tcpClientImage);
            }
            else
            {
                this.tcpClientImage = RosConnectionManager.Instance.rosConnections.sigverseRosBridgeTcpClientMap[topicNameImage];
            }

            // Generous 100 s timeouts on both streams.
            this.networkStreamCameraInfo              = this.tcpClientCameraInfo.GetStream();
            this.networkStreamCameraInfo.ReadTimeout  = 100000;
            this.networkStreamCameraInfo.WriteTimeout = 100000;

            this.networkStreamImage              = this.tcpClientImage.GetStream();
            this.networkStreamImage.ReadTimeout  = 100000;
            this.networkStreamImage.WriteTimeout = 100000;


            // Depth Camera
            this.depthCamera = this.cameraFrameObj.GetComponentInChildren <Camera>();

            int imageWidth  = this.depthCamera.targetTexture.width;
            int imageHeight = this.depthCamera.targetTexture.height;

            // 2 bytes per pixel (16-bit depth). C# arrays are zero-initialized
            // on allocation, so the original explicit clearing loop was redundant
            // and has been removed.
            this.byteArray = new byte[imageWidth * imageHeight * 2];

            this.imageTexture = new Texture2D(imageWidth, imageHeight, TextureFormat.RGB24, false);


            //  [camera/depth/CameraInfo]
            // Pinhole intrinsics with zero distortion for the simulated camera.
            string distortionModel = "plumb_bob";

            double[] D = { 0.0, 0.0, 0.0, 0.0, 0.0 };
            double[] K = { 465, 0.0, 320, 0.0, 465, 240, 0.0, 0.0, 1.0 };
            double[] R = { 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 };
            double[] P = { 465, 0.0, 320, 0.0, 0.0, 465, 240, 0.0, 0.0, 0.0, 1.0, 0.0 };

//			double[] D = { 0.14078746736049652, 0.07252906262874603, 0.004671256057918072, 0.0014421826926991343, 0.06731976568698883 };
//			double[] K = { 475.25030517578125, 0.0, 333.3515625, 0.0, 475.2502136230469, 245.8830108642578, 0.0, 0.0, 1.0 };
//			double[] R = { 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 };
//			double[] P = { 475.25030517578125, 0.0, 333.3515625, 0.024700000882148743, 0.0, 475.2502136230469, 245.8830108642578, -0.0007332635577768087, 0.0, 0.0, 1.0, 0.004069563001394272 };

            // Zero-valued ROI placeholder for the CameraInfo message.
            RegionOfInterest roi = new RegionOfInterest(0, 0, 0, 0, false);

            this.cameraInfoData = new CameraInfoForSIGVerseBridge(null, (uint)imageHeight, (uint)imageWidth, distortionModel, D, K, R, P, 0, 0, roi);

            //  [camera/depth/Image_raw]
            string encoding    = "16UC1";
            byte   isBigendian = 0;
            uint   step        = (uint)imageWidth * 2;             // bytes per row: 2 bytes per pixel * width

            this.imageData = new ImageForSIGVerseBridge(null, (uint)imageHeight, (uint)imageWidth, encoding, isBigendian, step, null);

            this.header = new Header(0, new SIGVerse.RosBridge.msg_helpers.Time(0, 0), this.cameraFrameObj.name);

            // Reusable "publish" message envelopes for both topics.
            this.cameraInfoMsg = new SIGVerseRosBridgeMessage <CameraInfoForSIGVerseBridge>("publish", topicNameCameraInfo, CameraInfoForSIGVerseBridge.GetMessageType(), this.cameraInfoData);
            this.imageMsg      = new SIGVerseRosBridgeMessage <ImageForSIGVerseBridge>     ("publish", topicNameImage, ImageForSIGVerseBridge.GetMessageType(), this.imageData);

            this.isUsingThread = isUsingThread;
        }
        /// <summary>
        /// Focus the camera on the given rectangle of the preview, defined by the position and size parameters, in UI coordinates within the CaptureElement
        /// </summary>
        /// <param name="position">The position of the tap, to become the center of the focus rectangle</param>
        /// <param name="size">the size of the rectangle around the tap</param>
        /// <returns></returns>
        public async Task TapToFocus(Point position, Size size)
        {
            // Mark that a focus operation is now in progress
            _isFocused = true;

            var previewProperties = _mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
            var previewRect = GetPreviewStreamRectInControl(previewProperties, PreviewControl, _displayOrientation);

            // Size the highlight rectangle to match the requested focus area
            FocusRectangle.Width = size.Width;
            FocusRectangle.Height = size.Height;

            // Center the rectangle on the tap point
            var rectLeft = position.X - FocusRectangle.Width / 2;
            var rectTop = position.Y - FocusRectangle.Height / 2;

            // Clamp the rectangle so it stays within the active preview area of the
            // CaptureElement — it must not spill into letterboxing or past the window edge
            var maxLeft = previewRect.Width - FocusRectangle.Width + previewRect.Left;
            var maxTop = previewRect.Height - FocusRectangle.Height + previewRect.Top;

            rectLeft = Math.Min(maxLeft, Math.Max(previewRect.Left, rectLeft));
            rectTop = Math.Min(maxTop, Math.Max(previewRect.Top, rectTop));

            // Position the highlight and show it in white while focusing is in flight
            Canvas.SetLeft(FocusRectangle, rectLeft);
            Canvas.SetTop(FocusRectangle, rectTop);

            FocusRectangle.Stroke = new SolidColorBrush(Colors.White);
            FocusRectangle.Visibility = Visibility.Visible;

            // Translate the UI-space tap into preview-stream coordinates, accounting for rotation
            var focusPreview = ConvertUiTapToPreviewRect(position, size, previewRect);

            // Note that this Region Of Interest could be configured to also calculate exposure and white balance within the region
            var regionOfInterest = new RegionOfInterest
            {
                AutoFocusEnabled = true,
                BoundsNormalized = true,
                Bounds = focusPreview,
                Type = RegionOfInterestType.Unknown,
                Weight = 100,
            };

            var focusState = await FocusCamera(regionOfInterest);

            // Lime outline on a successful focus, red otherwise
            FocusRectangle.Stroke = new SolidColorBrush(focusState == MediaCaptureFocusState.Focused ? Colors.Lime : Colors.Red);
        }