void Ahrs_ValueChanged(IHardwareComponent sender)
{
    IAnalogSensor a = (IAnalogSensor)sender;

    int val = a.AnalogValue;    // 0v = 0, 5V = 470 approx

    /*
     * Yaw PWM val
     * --------------------
     */

    double heading;

    if (val > 507)
    {
        heading = GeneralMath.map(val, 508, 1024, 0, 180);
    }
    else
    {
        heading = GeneralMath.map(val, 0, 507, 180, 360);
    }

    //Debug.WriteLine("Ahrs: Value=" + val + " heading: " + heading);

    lock (currentSensorsDataLock)
    {
        ISensorsData sensorsData = new SensorsDataShorty(this.currentSensorsData);

        sensorsData.CompassHeadingDegrees = heading;

        //Debug.WriteLine(sensorsData.ToString());

        this.currentSensorsData = sensorsData;
    }
}
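// GeneralMath.map() is used by every handler in this section but its implementation is not
// shown here. The class below is a hypothetical stand-in (the name GeneralMathSketch is
// illustrative, not the project's), assuming the method behaves like the Arduino map() helper:
// a plain linear remap, with an integer overload to match calls such as
// GeneralMath.map(args.x, 0, 320, 45, -45). The project's actual version may clamp or round.
static class GeneralMathSketch
{
    // Linearly rescale x from the range [inMin, inMax] to [outMin, outMax].
    public static double map(double x, double inMin, double inMax, double outMin, double outMax)
    {
        return (x - inMin) * (outMax - outMin) / (inMax - inMin) + outMin;
    }

    // Integer overload; integer division truncates toward zero, as in the Arduino map().
    public static int map(int x, int inMin, int inMax, int outMin, int outMax)
    {
        return (x - inMin) * (outMax - outMin) / (inMax - inMin) + outMin;
    }
}
// With this behavior, the AHRS branch above maps an analog reading of val = 766 to
// (766 - 508) * (180 - 0) / (1024 - 508) + 0 = 90 degrees.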
private void RPiCameraSensor_TargetsChanged(object sender, TargetingCameraEventArgs args)
{
    // Raspberry Pi based camera sensor works under Wheezy and uses OpenCV and Python to process
    // 240x320 frames and select areas with yellow color. Bearing, inclination and size of blobs is then
    // reported over HTTP to RPiCamera (derived from HttpServerBase). Frequency is around 10 FPS.

    //Debug.WriteLine("RPi Camera Event: " + args);

    // On Raspberry Pi:
    //   pixy.blocks[i].signature   The signature number of the detected object (1-7)
    //   pixy.blocks[i].x           The x location of the center of the detected object (0 to 319)
    //   pixy.blocks[i].y           The y location of the center of the detected object (0 to 199)
    //   pixy.blocks[i].width       The width of the detected object (1 to 320)
    //   pixy.blocks[i].height      The height of the detected object (1 to 200)
    //
    // Field of view:
    //   goal 45 degrees left    x=10
    //   middle                  x=160
    //   goal 45 degrees right   x=310
    //
    //   goal 30 degrees up      y=10
    //   middle                  y=90
    //   goal 30 degrees down    y=190

    if (args.width * args.height > 500)    // only large objects count
    {
        int bearing = GeneralMath.map(args.x, 0, 320, 45, -45);
        int inclination = GeneralMath.map(args.y, 0, 200, 30, -30);

        //Debug.WriteLine("RPi: bearing=" + bearing + " inclination: " + inclination);

        lock (currentSensorsDataLock)
        {
            ISensorsData sensorsData = new SensorsDataPlucky(this.currentSensorsData);

            sensorsData.TargetingCameraBearingDegrees = bearing;
            sensorsData.TargetingCameraInclinationDegrees = inclination;
            sensorsData.TargetingCameraTimestamp = args.timestamp;

            //Debug.WriteLine(sensorsData.ToString());

            this.currentSensorsData = sensorsData;
        }
    }
}
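// Worked example of the mapping above, assuming the linear map() sketched earlier: a yellow
// blob reported at x = 96, y = 50 with width = 40, height = 30 passes the area gate
// (40 * 30 = 1200 > 500) and yields
//   bearing     = map(96, 0, 320, 45, -45) = 45 - 96 * 90 / 320 = 18 degrees  (left of center)
//   inclination = map(50, 0, 200, 30, -30) = 30 - 50 * 60 / 200 = 15 degrees  (above center)
// i.e. with this handler's sign convention, a target left of center has a positive bearing.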
void PixyCameraSensor_PixyCameraBlocksChanged(object sender, TargetingCameraEventArgs args)
{
    //Debug.WriteLine("Pixy Camera Event: " + args);

    // On Arduino:
    //   pixy.blocks[i].signature   The signature number of the detected object (1-7)
    //   pixy.blocks[i].x           The x location of the center of the detected object (0 to 319)
    //   pixy.blocks[i].y           The y location of the center of the detected object (0 to 199)
    //   pixy.blocks[i].width       The width of the detected object (1 to 320)
    //   pixy.blocks[i].height      The height of the detected object (1 to 200)
    //
    // Field of view:
    //   goal 45 degrees left    x=10
    //   middle                  x=160
    //   goal 45 degrees right   x=310
    //
    //   goal 30 degrees up      y=10
    //   middle                  y=90
    //   goal 30 degrees down    y=190

    if (args.width * args.height > 500)    // only large objects count
    {
        int bearing = GeneralMath.map(args.x, 0, 320, -45, 45);
        int inclination = GeneralMath.map(args.y, 0, 200, 30, -30);

        //Debug.WriteLine("Pixy: bearing=" + bearing + " inclination: " + inclination);

        lock (currentSensorsDataLock)
        {
            ISensorsData sensorsData = new SensorsDataShorty(this.currentSensorsData);

            sensorsData.TargetingCameraBearingDegrees = bearing;
            sensorsData.TargetingCameraInclinationDegrees = inclination;
            sensorsData.TargetingCameraTimestamp = args.timestamp;

            //Debug.WriteLine(sensorsData.ToString());

            this.currentSensorsData = sensorsData;
        }
    }
}
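// Note that this handler is the Pixy (Arduino-attached) counterpart of the Raspberry Pi handler
// above: the bearing map is sign-flipped (x = 0 now yields -45 degrees, so the same blob at
// x = 96 gives a bearing of 27 - 45 = -18 degrees), and the snapshot is cloned into
// SensorsDataShorty rather than SensorsDataPlucky. All three handlers publish their results the
// same way: copy the current snapshot, set the new values on the copy, and swap the reference in
// under currentSensorsDataLock. A hypothetical consumer (not part of this section; the method
// name is illustrative) would take a consistent snapshot the same way and read it outside the lock:
private ISensorsData TakeSensorsSnapshot()
{
    lock (currentSensorsDataLock)
    {
        // Writers replace the published reference rather than mutating it in place,
        // so the lock only needs to cover the read of the reference itself.
        return this.currentSensorsData;
    }
}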