/// <summary>
/// Converts the mouse position over the vicinity canvas into meters relative to the
/// robot center, and raises a MouseMoved routed event carrying the offsets and the
/// corresponding geo position.
/// </summary>
/// <param name="sender">event source (the canvas)</param>
/// <param name="mea">mouse event data; position is read relative to canvasRel</param>
void canvas_MouseMove(object sender, MouseEventArgs mea)
{
    try
    {
        Point pixelPos = mea.GetPosition(canvasRel);

        // Pixel offsets from the canvas center; screen Y grows downward, so flip its sign.
        double dx = pixelPos.X - canvasRel.ActualWidth / 2.0d;
        double dy = -(pixelPos.Y - canvasRel.ActualHeight / 2.0d);

        // Scale from pixels to meters, relative to the robot center.
        dx *= metersPerPixelX;
        dy *= metersPerPixelY;

        MapperVicinity vicinity = CurrentMapper;

        GeoPosition cursorGeoPos = new GeoPosition(vicinity.robotPosition);
        cursorGeoPos.translate(new Distance(dx), new Distance(dy));

        RaiseEvent(new RoutedEventArgsMouseMoved(MapperViewControl.MouseMovedEvent)
        {
            xMeters = dx,
            yMeters = dy,
            geoPosition = cursorGeoPos
        });
    }
    catch
    {
        // Best-effort: mouse-move math must never crash the UI thread.
    }
}
/// <summary>
/// Update the vicinity map with obstacles detected by the scanning sonar.
/// Processes a sweep only when the latest range reading set changed, a full sweep
/// frame is ready, and the sonar reports exactly 26 beam angles (one complete sweep
/// of this particular sensor -- NOTE(review): the 26 appears hardware-specific; confirm).
/// </summary>
/// <param name="timestamp">timestamp (ticks) stamped onto every obstacle created here</param>
/// <param name="p">sonar data containing the latest reading per beam angle</param>
private void updateMapperVicinity(long timestamp, SonarData p)
{
    if (lastRangeReadingSetChanged && lastRangeReadingSet.sweepFrameReady && p.angles.Keys.Count == 26)
    {
        // Derive the sweep geometry from the first and last beam of the frame;
        // forwardAngle is the beam angle that corresponds to "straight ahead".
        RangeReading rrFirst = p.getLatestReadingAt(p.angles.Keys.First());
        RangeReading rrLast = p.getLatestReadingAt(p.angles.Keys.Last());

        double sweepAngle = rrFirst.angleDegrees - rrLast.angleDegrees;
        double forwardAngle = sweepAngle / 2.0d;

        foreach (int angle in p.angles.Keys)
        {
            RangeReading rr = p.getLatestReadingAt(angle);

            double rangeMeters = rr.rangeMeters + robotCornerDistanceMeters; // sensor is on the corner, adjust for that
            // Bearing of this beam relative to the robot's forward direction.
            double relBearing = rr.angleDegrees - forwardAngle;

            // Project the reading from the robot position out along (heading + relBearing).
            GeoPosition pos1 = (GeoPosition)mapperVicinity.robotPosition.Clone();
            pos1.translate(
                new Direction() { heading = mapperVicinity.robotDirection.heading, bearingRelative = relBearing },
                new Distance(rangeMeters));

            DetectedObstacle dobst1 = new DetectedObstacle(pos1)
            {
                geoPosition = pos1,
                firstSeen = timestamp,
                lastSeen = timestamp,
                color = Colors.Red,
                detectorType = DetectorType.SONAR_SCANNING,
                objectType = DetectedObjectType.Obstacle
            };

            mapperVicinity.Add(dobst1);
        }

        // make sure we have the latest heading info in the mapper, and map positions have been computed:
        Direction dir = new Direction()
        {
            heading = lastDirData.heading,
            bearing = mapperVicinity.robotDirection.bearing,
            TimeStamp = timestamp
        };

        mapperVicinity.robotDirection = dir; // will call mapperVicinity.computeMapPositions();

        updateMapWindow();
    }

    // Update the status label regardless of whether a sweep was processed this call.
    mapperTraceLabel.Content = "" + mapperVicinity.detectedObjects.Count + " objects";
}
// Relative bearings (degrees) of the eight corner-mounted IR sensors, indexed in the
// same order as the arrangedForMapper array built below (index 0..7).
private static double[] angles8 = { 37.5d, 62.5d, 117.5d, 142.5d, 217.5d, 242.5d, -62.5d, -37.5d }; // eight IR sensors on the corners

// IR readings at or beyond this range (meters) are treated as "nothing detected" and skipped.
private const double maxReliableIrRangeMeters = 1.4d;

/// <summary>
/// Converts the eight corner IR proximity readings into obstacle objects and adds them
/// to the vicinity map. Readings >= maxReliableIrRangeMeters are ignored as out of range.
/// </summary>
/// <param name="proximityData">snapshot of the eight IR range values (meters) with a timestamp</param>
protected void updateMapperWithProximityData(proxibrick.ProximityDataDssSerializable proximityData)
{
    // Rearrange the named sensor fields into angles8 order (index 0..7).
    double[] arrangedForMapper = new double[8];

    arrangedForMapper[4] = proximityData.mfl;
    arrangedForMapper[5] = proximityData.mffl;
    arrangedForMapper[6] = proximityData.mffr;
    arrangedForMapper[7] = proximityData.mfr;

    arrangedForMapper[3] = proximityData.mbl;
    arrangedForMapper[2] = proximityData.mbbl;
    arrangedForMapper[1] = proximityData.mbbr;
    arrangedForMapper[0] = proximityData.mbr;

    // Length is the idiomatic accessor for a 1-D array (was GetLength(0)).
    for (int i = 0; i < arrangedForMapper.Length; i++)
    {
        double rangeMeters = arrangedForMapper[i];

        if (rangeMeters < maxReliableIrRangeMeters)
        {
            rangeMeters += robotCornerDistanceMeters; // sensor is on the corner, adjust for that

            // NOTE(review): +90 appears to convert the sensor-mount angle into a
            // robot-relative bearing -- confirm against angles8 definition.
            double relBearing = angles8[i] + 90.0d;

            // Project the reading from the robot position out along (heading + relBearing).
            GeoPosition pos1 = (GeoPosition)_mapperVicinity.robotPosition.Clone();
            pos1.translate(
                new Direction() { heading = _mapperVicinity.robotDirection.heading, bearingRelative = relBearing },
                new Distance(rangeMeters));

            DetectedObstacle dobst1 = new DetectedObstacle()
            {
                geoPosition = pos1,
                firstSeen = proximityData.TimeStamp.Ticks,
                lastSeen = proximityData.TimeStamp.Ticks,
                detectorType = DetectorType.IR_DIRECTED,
                objectType = DetectedObjectType.Obstacle
            };

            dobst1.SetColorByType();

            lock (_mapperVicinity)
            {
                _mapperVicinity.Add(dobst1);
            }
        }
    }
}
private const int step = 6; // for speed and given that we are actually dealing with sonar data, pick only Nth points.

/// <summary>
/// Converts a laser range finder scan into obstacle objects and merges them into the
/// vicinity map in one locked batch. Only every step-th ray is sampled, and only
/// readings inside the (minReliableRangeMeters, maxReliableRangeMeters) window are kept.
/// </summary>
/// <param name="laserData">laser scan; DistanceMeasurements values are in millimeters</param>
protected void updateMapperWithLaserData(sicklrf.State laserData)
{
    int rayCount = laserData.DistanceMeasurements.Length;

    // Preallocate roughly one slot per sampled ray to avoid list growth.
    List<IDetectedObject> obstacles = new List<IDetectedObject>(rayCount / step + 5);

    for (int rayIndex = 0; rayIndex < rayCount; rayIndex += step)
    {
        double distanceMeters = laserData.DistanceMeasurements[rayIndex] / 1000.0d; // millimeters -> meters

        if (distanceMeters > minReliableRangeMeters && distanceMeters < maxReliableRangeMeters)
        {
            // Bearing of this ray relative to the robot's forward direction;
            // the scan spans 180 degrees across rayCount rays.
            double bearingRel = forwardAngle - rayIndex * 180.0d / rayCount;

            // Project the reading from the robot position out along (heading + bearingRel).
            GeoPosition obstaclePos = (GeoPosition)_mapperVicinity.robotPosition.Clone();
            obstaclePos.translate(
                new Direction() { heading = _mapperVicinity.robotDirection.heading, bearingRelative = bearingRel },
                new Distance(distanceMeters));

            DetectedObstacle obstacle = new DetectedObstacle()
            {
                geoPosition = obstaclePos,
                firstSeen = laserData.TimeStamp.Ticks,
                lastSeen = laserData.TimeStamp.Ticks,
                detectorType = DetectorType.SONAR_SCANNING,
                objectType = DetectedObjectType.Obstacle
            };

            obstacle.SetColorByType();

            obstacles.Add(obstacle);
        }
    }

    if (obstacles.Count > 0)
    {
        int countBefore = 0;

        // Add the whole batch and recompute map positions under one lock.
        lock (_mapperVicinity)
        {
            countBefore = _mapperVicinity.Count;
            _mapperVicinity.AddRange(obstacles);
            _mapperVicinity.computeMapPositions();
        }

        //Tracer.Trace(string.Format("laser ready - added {0} to {1}", obstacles.Count, countBefore));
    }
}
// Half the robot's body diagonal; added to corner-sensor readings so obstacle
// ranges are measured from the robot center rather than the sensor mount point.
protected double robotCornerDistanceMeters = 0.0d; // to account for robot dimensions when adding measurements from corner-located proximity sensors.

/// <summary>
/// Main window constructor: wires up the route planner and the USB proximity module,
/// initializes the robot pose in the vicinity mapper, precomputes the corner distance,
/// and seeds a debug obstacle into the map.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    routePlanner = new RoutePlanner(mapperVicinity);

    this.Closing += new System.ComponentModel.CancelEventHandler(MainWindow_Closing);

    //create picpxmod device.
    _picpxmod = new ProximityModule(0x0925, 0x7001); // see PIC Firmware - usb_descriptors.c lines 178,179

    _picpxmod.HasReadFrameEvent += pmFrameCompleteHandler;

    _picpxmod.DeviceAttachedEvent += new EventHandler<ProximityModuleEventArgs>(_picpxmod_DeviceAttachedEvent);
    _picpxmod.DeviceDetachedEvent += new EventHandler<ProximityModuleEventArgs>(_picpxmod_DeviceDetachedEvent);

    // Try to locate the HID device now; the label reflects the outcome.
    bool deviceAttached = _picpxmod.FindTheHid();

    pmValuesLabel.Content = deviceAttached ?
        "Proximity Board Found" :
        string.Format("Proximity Board NOT Found\r\nYour USB Device\r\nmust have:\r\n vendorId=0x{0:X}\r\nproductId=0x{1:X}", _picpxmod.vendorId, _picpxmod.productId);

    // Start the mapper from the default pose.
    mapperVicinity.robotPosition = (GeoPosition)robotPositionDefault.Clone();
    mapperVicinity.robotDirection = (Direction)robotDirectionDefault.Clone();

    // we will need this later:
    // half of sqrt(length^2 + width^2) -- distance from robot center to a corner.
    robotCornerDistanceMeters = Math.Sqrt(mapperVicinity.robotState.robotLengthMeters * mapperVicinity.robotState.robotLengthMeters + mapperVicinity.robotState.robotWidthMeters * mapperVicinity.robotState.robotWidthMeters) / 2.0d;

    // --------- debug ------------
    // Seed one red obstacle 1 m forward-right of the robot so the map view has content.
    GeoPosition pos1 = (GeoPosition)mapperVicinity.robotPosition.Clone();

    //pos1.translate(new Distance(1.0d), new Distance(1.0d));     // geo coordinates - East North
    pos1.translate(new Direction() { heading = mapperVicinity.robotDirection.heading, bearingRelative = 45.0d }, new Distance(1.0d)); // robot coordinates - forward right

    DetectedObstacle dobst1 = new DetectedObstacle(pos1) { color = Colors.Red };

    mapperVicinity.Add(dobst1);

    GeoPosition pos2 = (GeoPosition)mapperVicinity.robotPosition.Clone();

    //pos2.translate(new Distance(-1.0d), new Distance(1.0d));    // geo coordinates - West North
    pos2.translate(new Direction() { heading = mapperVicinity.robotDirection.heading, bearingRelative = -45.0d }, new Distance(1.0d)); // robot coordinates - forward left

    // Second (yellow, forward-left) debug obstacle is built but deliberately not added.
    DetectedObstacle dobst2 = new DetectedObstacle(pos2) { color = Colors.Yellow };

    //mapperVicinity.Add(dobst2);

    mapperVicinity.computeMapPositions();
    // --------- end debug ------------
}
/// <summary>
/// Main read loop.
/// Reads a raw frame from the Kinect service, processes it asynchronously
/// (skeletons, RGB, depth), requests a UI update, and occasionally re-reads
/// the Kinect state to pick up tilt changes.
/// </summary>
/// <returns>A standard CCR iterator.</returns>
private IEnumerator<ITask> ReadKinectLoop()
{
    // note: see frame rate at C:\Microsoft Robotics Dev Studio 4\projects\TrackRoamer\TrackRoamerServices\Config\TrackRoamer.TrackRoamerBot.Kinect.Config.xml
    while (true)
    {
        try
        {
            // Build a frame request reflecting the user's current image-option selection.
            kinectProxy.QueryRawFrameRequest frameRequest = new kinectProxy.QueryRawFrameRequest();
            frameRequest.IncludeDepth = this.IncludeDepth;
            frameRequest.IncludeVideo = this.IncludeVideo;
            frameRequest.IncludeSkeletons = this.IncludeSkeletons;

            if (!this.IncludeDepth && !this.IncludeVideo && !this.IncludeSkeletons)
            {
                // poll 2 times a sec if user for some reason deselected all image options (this would turn into a busy loop then)
                yield return (TimeoutPort(KinectLoopWaitIntervalMs).Receive());
            }

            kinect.RawKinectFrames rawFrames = null;

            // poll depth camera
            // Only the FIRST frame-query failure is logged, to avoid flooding the log.
            yield return (this.kinectPort.QueryRawFrame(frameRequest).Choice(
                rawFrameResponse =>
                {
                    rawFrames = rawFrameResponse.RawFrames;
                },
                failure =>
                {
                    if (!this.atLeastOneFrameQueryFailed)
                    {
                        this.atLeastOneFrameQueryFailed = true;
                        LogError(failure);
                    }
                }));

            // Propagate current pan/tilt so the frame processor can compensate for camera pose.
            this.frameProcessor.currentPanKinect = _state.currentPanKinect;
            this.frameProcessor.currentTiltKinect = _state.currentTiltKinect;

            // NOTE(review): rawFrames stays null if the query failed; SetRawFrame and the
            // null-checks below are then the only guard -- confirm SetRawFrame tolerates null.
            this.frameProcessor.SetRawFrame(rawFrames);

            if (null != rawFrames.RawSkeletonFrameData)
            {
                yield return (new IterativeTask(this.frameProcessor.ProcessSkeletons));

                var tmpAllSkeletons = frameProcessor.AllSkeletons; // get a snapshot of the pointer to allocated array, and then take sweet time processing it knowing it will not change

                if (tmpAllSkeletons != null)
                {
                    // tmpAllSkeletons is a list of seven skeletons, those good enough for processing have IsSkeletonActive true
                    var skels = from s in tmpAllSkeletons where s.IsSkeletonActive select s;

                    foreach (VisualizableSkeletonInformation skel in skels)
                    {
                        // Kinect Z goes straight forward, X - to the left side, Y - up
                        double kZ = skel.JointPoints[nui.JointType.Spine].Z; // meters, relative to Kinect camera
                        double kX = skel.JointPoints[nui.JointType.Spine].X;

                        // Convert the Kinect-relative (X, Z) of the spine joint into a
                        // robot-relative bearing/range, then into a geo position.
                        GeoPosition pos1 = (GeoPosition)_mapperVicinity.robotPosition.Clone();

                        double relBearing = Direction.to180fromRad(Math.Atan2(-kX, kZ));
                        double rangeMeters = Math.Sqrt(kZ * kZ + kX * kX);

                        pos1.translate(new Direction() { heading = _mapperVicinity.robotDirection.heading, bearingRelative = relBearing }, new Distance(rangeMeters));

                        DetectedHuman dhum1 = new DetectedHuman()
                        {
                            geoPosition = pos1,
                            firstSeen = DateTime.Now.Ticks,
                            lastSeen = DateTime.Now.Ticks,
                            detectorType = DetectorType.KINECT_SKELETON,
                        };

                        lock (_mapperVicinity)
                        {
                            _mapperVicinity.Add(dhum1);
                        }
                    }
                }
            }

            if (null != rawFrames.RawColorFrameData)
            {
                yield return (new IterativeTask(this.frameProcessor.ProcessImageFrame)); // RGB Video
            }

            if (null != rawFrames.RawDepthFrameData)
            {
                yield return (new IterativeTask(this.frameProcessor.ProcessDepthFrame)); // Depth information frame
            }

            this.UpdateUI(this.frameProcessor);

            Decide(SensorEventSource.Kinect);

            // poll state at low frequency to see if tilt has shifted (may happen on an actual robot due to shaking)
            if (common.Utilities.ElapsedSecondsSinceStart - this.lastStateReadTime > 1)
            {
                // Only the FIRST tilt-poll failure is logged, to avoid flooding the log.
                yield return (this.kinectPort.Get().Choice(
                    kinectState =>
                    {
                        this.UpdateState(kinectState); // update value displayed in WPF window
                        _state.currentTiltKinect = kinectState.TiltDegrees;
                    },
                    failure =>
                    {
                        if (!this.atLeastOneTiltPollFailed)
                        {
                            this.atLeastOneTiltPollFailed = true;
                            LogError(failure);
                        }
                    }));

                this.lastStateReadTime = common.Utilities.ElapsedSecondsSinceStart;
            }
        }
        finally
        {
            // Empty finally: yield return is not allowed inside try/catch, and this
            // try/finally form keeps the loop body structured without a handler.
        }
    }
}