// MapKit alternative
// Computes the distance between two airports in nautical miles by projecting
// each airport's latitude/longitude onto MapKit's 2D map plane and using
// MapKit's built-in meters-between-points calculation.
public static double MKGetDistanceBetween (Airport airport1, Airport airport2)
{
	var from = MKMapPoint.FromCoordinate (new CLLocationCoordinate2D (airport1.Latitude, airport1.Longitude));
	var to = MKMapPoint.FromCoordinate (new CLLocationCoordinate2D (airport2.Latitude, airport2.Longitude));

	// Meters -> nautical miles.
	return MKGeometry.MetersBetweenMapPoints (from, to) / MetersPerNauticalMile;
}
// Creates a map annotation wrapping the given airport.
public AirportAnnotation(Airport airport)
{
	Airport = airport;
}
// Calculates distance between 2 locations on Earth using the Haversine formula.
// http://www.movable-type.co.uk/scripts/latlong.html
// Returns the great-circle distance in nautical miles.
public static double GetDistanceBetween (Airport depart, Airport arrive)
{
	double dLon = ToRadians (arrive.Longitude - depart.Longitude);
	double dLat = ToRadians (arrive.Latitude - depart.Latitude);

	// a = sin^2(dLat/2) + sin^2(dLon/2) * cos(lat1) * cos(lat2)
	double a = Math.Pow (Math.Sin (dLat / 2), 2)
		+ Math.Pow (Math.Sin (dLon / 2), 2) * Math.Cos (ToRadians (depart.Latitude)) * Math.Cos (ToRadians (arrive.Latitude));

	// Central angle between the two points, in radians.
	double angle = 2 * Math.Atan2 (Math.Sqrt (a), Math.Sqrt (1 - a));

	// Arc length (meters) -> nautical miles.
	return angle * MeanEarthRadius / MetersPerNauticalMile;
}
/// <summary>
/// Parses one colon-separated airport record of the form:
/// ICAO:IATA:Name:City:Country:latDeg:latMin:latSec:N|S|U:lonDeg:lonMin:lonSec:E|W|U:elevation
/// </summary>
/// <param name="entry">The raw record; must contain exactly NumFields fields.</param>
/// <param name="airport">On success, the parsed airport; null on any failure.</param>
/// <param name="error">Set when the field count is wrong.
/// NOTE(review): not updated on per-field validation failures below — it keeps
/// whatever the caller passed in; confirm callers only consult field/message there.</param>
/// <param name="field">The field at which validation failed.</param>
/// <param name="message">Human-readable description of the failure.</param>
/// <returns>true if the entry parsed and validated; otherwise false.</returns>
static bool TryParse (string entry, out Airport airport, ref AirportError error, ref AirportField field, ref string message)
{
	string[] fields = entry.Split (new char[] { ':' });
	int degrees, minutes, seconds, elevation;
	string alt;
	int neg;

	if (fields.Length != NumFields) {
		if (fields.Length < NumFields) {
			message = string.Format ("Airport entry is missing {0} fields.", NumFields - fields.Length);
			field = (AirportField) (NumFields - fields.Length);
			error = AirportError.NotEnoughFields;
		} else {
			message = "Airport entry has too many fields.";
			error = AirportError.TooManyFields;
			field = AirportField.Elevation;
		}

		airport = null;
		return false;
	}

	airport = new Airport ();

	// Parse ICAO code
	if (!IsICAO (fields[0])) {
		message = string.Format ("Airport ICAO '{0}' code is invalid.", fields[0]);
		field = AirportField.ICAO;
		airport = null;
		return false;
	} else {
		airport.ICAO = fields[0];
	}

	// Parse IATA code ("N/A" means the airport has no IATA code)
	if (!IsIATA (fields[1])) {
		message = string.Format ("Airport IATA code '{0}' is invalid.", fields[1]);
		field = AirportField.IATA;
		airport = null;
		return false;
	} else if (fields[1] != "N/A") {
		airport.IATA = fields[1];
	} else {
		airport.IATA = "";
	}

	airport.Name = FixCapitalization (fields[2]);
	airport.City = FixCapitalization (fields[3]);
	airport.Country = fields[4] == "USA" ? "USA" : FixCapitalization (fields[4]);

	// Latitude as degrees/minutes/seconds plus a hemisphere letter; only 'N'
	// yields a positive sign, so 'S' and 'U' (unknown) both map south.
	// FIX: latitude degrees must not exceed 90 (the poles). This previously
	// compared against 180 and accepted impossible latitudes such as 135°.
	if (!Int32.TryParse (fields[5], out degrees) || degrees > 90) {
		message = "Airport Latitude (degrees) is invalid.";
		field = AirportField.LatitudeDegrees;
		airport = null;
		return false;
	} else if (!Int32.TryParse (fields[6], out minutes) || minutes >= 60) {
		message = "Airport Latitude (minutes) is invalid.";
		field = AirportField.LatitudeMinutes;
		airport = null;
		return false;
	} else if (!Int32.TryParse (fields[7], out seconds) || seconds >= 60) {
		message = "Airport Latitude (seconds) is invalid.";
		field = AirportField.LatitudeSeconds;
		airport = null;
		return false;
	} else if (!(fields[8] == "N" || fields[8] == "S" || fields[8] == "U")) {
		message = "Airport Latitude direction is invalid.";
		field = AirportField.LatitudeDirection;
		airport = null;
		return false;
	} else {
		int sign = fields[8][0] == 'N' ? 1 : -1;

		// DMS -> decimal degrees: signed total seconds / 3600.
		airport.Latitude = (sign * (((degrees * 60) + minutes) * 60 + seconds)) / 3600.0;
	}

	// Longitude; only 'E' yields a positive sign, so 'W' and 'U' map west.
	// NOTE(review): exactly 180°0'0" is a technically valid longitude but is
	// rejected by >= 180 — confirm against the data set before relaxing.
	if (!Int32.TryParse (fields[9], out degrees) || degrees >= 180) {
		message = "Airport Longitude (degrees) is invalid.";
		field = AirportField.LongitudeDegrees;
		airport = null;
		return false;
	} else if (!Int32.TryParse (fields[10], out minutes) || minutes >= 60) {
		message = "Airport Longitude (minutes) is invalid.";
		field = AirportField.LongitudeMinutes;
		airport = null;
		return false;
	} else if (!Int32.TryParse (fields[11], out seconds) || seconds >= 60) {
		message = "Airport Longitude (seconds) is invalid.";
		field = AirportField.LongitudeSeconds;
		airport = null;
		return false;
	} else if (!(fields[12] == "E" || fields[12] == "W" || fields[12] == "U")) {
		message = "Airport Longitude direction is invalid.";
		field = AirportField.LongitudeDirection;
		airport = null;
		return false;
	} else {
		int sign = fields[12][0] == 'E' ? 1 : -1;

		airport.Longitude = (sign * (((degrees * 60) + minutes) * 60 + seconds)) / 3600.0;
	}

	// Altitude can be below sealevel...
	// Drop anything before a '-' so a stray prefix still parses as negative.
	if ((neg = fields[13].IndexOf ('-')) != -1)
		alt = fields[13].Substring (neg);
	else
		alt = fields[13];

	if (!Int32.TryParse (alt, out elevation)) {
		message = string.Format ("Airport Altitude '{0}' is invalid.", fields[13]);
		field = AirportField.Elevation;
		airport = null;
		return false;
	} else {
		airport.Elevation = elevation;
	}

	return true;
}
/// <summary>
/// Convenience overload: parses an airport entry, discarding the detailed
/// error/field/message diagnostics of the full TryParse.
/// </summary>
/// <param name="entry">The raw record to parse; may be null.</param>
/// <param name="airport">On success, the parsed airport; null on failure.</param>
/// <returns>true if the entry parsed and validated; otherwise false.</returns>
public static bool TryParse (string entry, out Airport airport)
{
	if (entry == null) {
		airport = null;
		return false;
	}

	// Dummy diagnostics; the detailed overload requires ref arguments.
	AirportError error = AirportError.NotEnoughFields;
	AirportField field = AirportField.Elevation;
	string message = null;

	return TryParse (entry, out airport, ref error, ref field, ref message);
}
/// <summary>
/// Parses one colon-separated airport record of the form:
/// ICAO:IATA:Name:City:Country:latDeg:latMin:latSec:N|S|U:lonDeg:lonMin:lonSec:E|W|U:elevation
/// </summary>
/// <param name="entry">The raw record; must contain exactly NumFields fields.</param>
/// <param name="airport">On success, the parsed airport; null on any failure.</param>
/// <param name="error">Set when the field count is wrong.
/// NOTE(review): not updated on per-field validation failures below — it keeps
/// whatever the caller passed in; confirm callers only consult field/message there.</param>
/// <param name="field">The field at which validation failed.</param>
/// <param name="message">Human-readable description of the failure.</param>
/// <returns>true if the entry parsed and validated; otherwise false.</returns>
static bool TryParse(string entry, out Airport airport, ref AirportError error, ref AirportField field, ref string message)
{
	string[] fields = entry.Split(new char[] { ':' });
	int degrees, minutes, seconds, elevation;
	string alt;
	int neg;

	if (fields.Length != NumFields) {
		if (fields.Length < NumFields) {
			message = string.Format("Airport entry is missing {0} fields.", NumFields - fields.Length);
			field = (AirportField)(NumFields - fields.Length);
			error = AirportError.NotEnoughFields;
		} else {
			message = "Airport entry has too many fields.";
			error = AirportError.TooManyFields;
			field = AirportField.Elevation;
		}

		airport = null;
		return false;
	}

	airport = new Airport();

	// Parse ICAO code
	if (!IsICAO(fields[0])) {
		message = string.Format("Airport ICAO '{0}' code is invalid.", fields[0]);
		field = AirportField.ICAO;
		airport = null;
		return false;
	} else {
		airport.ICAO = fields[0];
	}

	// Parse IATA code ("N/A" means the airport has no IATA code)
	if (!IsIATA(fields[1])) {
		message = string.Format("Airport IATA code '{0}' is invalid.", fields[1]);
		field = AirportField.IATA;
		airport = null;
		return false;
	} else if (fields[1] != "N/A") {
		airport.IATA = fields[1];
	} else {
		airport.IATA = "";
	}

	airport.Name = FixCapitalization(fields[2]);
	airport.City = FixCapitalization(fields[3]);
	airport.Country = fields[4] == "USA" ? "USA" : FixCapitalization(fields[4]);

	// Latitude as degrees/minutes/seconds plus a hemisphere letter; only 'N'
	// yields a positive sign, so 'S' and 'U' (unknown) both map south.
	// FIX: latitude degrees must not exceed 90 (the poles). This previously
	// compared against 180 and accepted impossible latitudes such as 135°.
	if (!Int32.TryParse(fields[5], out degrees) || degrees > 90) {
		message = "Airport Latitude (degrees) is invalid.";
		field = AirportField.LatitudeDegrees;
		airport = null;
		return false;
	} else if (!Int32.TryParse(fields[6], out minutes) || minutes >= 60) {
		message = "Airport Latitude (minutes) is invalid.";
		field = AirportField.LatitudeMinutes;
		airport = null;
		return false;
	} else if (!Int32.TryParse(fields[7], out seconds) || seconds >= 60) {
		message = "Airport Latitude (seconds) is invalid.";
		field = AirportField.LatitudeSeconds;
		airport = null;
		return false;
	} else if (!(fields[8] == "N" || fields[8] == "S" || fields[8] == "U")) {
		message = "Airport Latitude direction is invalid.";
		field = AirportField.LatitudeDirection;
		airport = null;
		return false;
	} else {
		int sign = fields[8][0] == 'N' ? 1 : -1;

		// DMS -> decimal degrees: signed total seconds / 3600.
		airport.Latitude = (sign * (((degrees * 60) + minutes) * 60 + seconds)) / 3600.0;
	}

	// Longitude; only 'E' yields a positive sign, so 'W' and 'U' map west.
	// NOTE(review): exactly 180°0'0" is a technically valid longitude but is
	// rejected by >= 180 — confirm against the data set before relaxing.
	if (!Int32.TryParse(fields[9], out degrees) || degrees >= 180) {
		message = "Airport Longitude (degrees) is invalid.";
		field = AirportField.LongitudeDegrees;
		airport = null;
		return false;
	} else if (!Int32.TryParse(fields[10], out minutes) || minutes >= 60) {
		message = "Airport Longitude (minutes) is invalid.";
		field = AirportField.LongitudeMinutes;
		airport = null;
		return false;
	} else if (!Int32.TryParse(fields[11], out seconds) || seconds >= 60) {
		message = "Airport Longitude (seconds) is invalid.";
		field = AirportField.LongitudeSeconds;
		airport = null;
		return false;
	} else if (!(fields[12] == "E" || fields[12] == "W" || fields[12] == "U")) {
		message = "Airport Longitude direction is invalid.";
		field = AirportField.LongitudeDirection;
		airport = null;
		return false;
	} else {
		int sign = fields[12][0] == 'E' ? 1 : -1;

		airport.Longitude = (sign * (((degrees * 60) + minutes) * 60 + seconds)) / 3600.0;
	}

	// Altitude can be below sealevel...
	// Drop anything before a '-' so a stray prefix still parses as negative.
	if ((neg = fields[13].IndexOf('-')) != -1) {
		alt = fields[13].Substring(neg);
	} else {
		alt = fields[13];
	}

	if (!Int32.TryParse(alt, out elevation)) {
		message = string.Format("Airport Altitude '{0}' is invalid.", fields[13]);
		field = AirportField.Elevation;
		airport = null;
		return false;
	} else {
		airport.Elevation = elevation;
	}

	return true;
}
// Creates a map annotation for the given airport, also recording the
// supplied user coordinates on the annotation.
public AirportAnnotation(Airport airport, CLLocationCoordinate2D userCoordinates)
{
	UserCoordinates = userCoordinates;
	Airport = airport;
}