/// <summary>
/// Files a robot run's outcome under the data-set bucket it belongs to.
/// </summary>
/// <param name="robotResult">Result to record; its Data.Name selects the bucket.</param>
public virtual void addRobotResult(RobotResult robotResult)
{
    // Resolve the per-data-set aggregate and append this run's result to it.
    getDataSetResult(robotResult.Data.Name).addResult(robotResult);
}
/// <summary>
/// Plans the robot's next move toward the point selected by GetNextStepPoint.
/// If the cell behind the robot (presumably 180° from its heading — confirm
/// against Move's semantics) is unexplored, the robot first turns left to survey it.
/// </summary>
/// <param name="robot">Robot whose position, rotation and map are inspected.</param>
/// <param name="result">Latest result for this robot; currently unused, kept for interface compatibility.</param>
/// <param name="limit">Search limit forwarded to GetNextStepPoint.</param>
/// <returns>The turn actions (if any) followed by a single Go.</returns>
public RobotAction[] NextStep(RobotManager robot, RobotResult result, int limit)
{
    // Survey the unexplored cell behind us before committing to a step.
    if (!robot.Map.Explored(robot.Position.Move((byte)robot.Rot + 2)))
    {
        return new[] { RobotAction.Left };
    }

    // Relative turn (0..3) from current heading to the desired heading.
    // Byte wrap-around is harmless here: values wrap mod 256, and 256 is a
    // multiple of 4, so the final % 4 result is unaffected.
    var turn = (byte)GetNextStepPoint(robot, limit);
    turn -= (byte)robot.Rot;
    turn += 4;
    turn %= 4;

    switch (turn)
    {
        case 0: return new[] { RobotAction.Go };
        case 1: return new[] { RobotAction.Right, RobotAction.Go };
        case 2: return new[] { RobotAction.Left, RobotAction.Left, RobotAction.Go };
        case 3: return new[] { RobotAction.Left, RobotAction.Go };
        default:
            // Unreachable: turn is always in 0..3 after the modulo above.
            // Replaces the original `throw new Exception("WTF")`.
            throw new InvalidOperationException($"Unexpected relative turn {turn}.");
    }
}
/// <summary>
/// Synchronously registers a new robot with the Kraftwerk API and returns its id.
/// </summary>
/// <returns>The id of the newly created robot.</returns>
/// <exception cref="HttpRequestException">Thrown when the API responds with a non-success status code.</exception>
public long PostRobot()
{
    // NOTE(review): creating an HttpClient per call risks socket exhaustion under
    // load; consider a shared static instance or IHttpClientFactory.
    using (var client = new HttpClient())
    {
        client.BaseAddress = new Uri(_kraftwerkAPIUrl);

        // GetAwaiter().GetResult() blocks like .Wait()/.Result did, but surfaces
        // the original exception instead of wrapping it in an AggregateException.
        HttpResponseMessage response = client.PostAsync("api/robot", null).GetAwaiter().GetResult();

        if (!response.IsSuccessStatusCode)
        {
            // Fail explicitly: the original code fell through with robotResult == null
            // and threw NullReferenceException on robotResult.Id.
            throw new HttpRequestException(
                $"POST api/robot failed with status {(int)response.StatusCode} ({response.ReasonPhrase}).");
        }

        RobotResult robotResult = response.Content.ReadAsAsync<RobotResult>().GetAwaiter().GetResult();
        return robotResult.Id;
    }
}
/// <summary>
/// Downloads and parses robots.txt for the host of <paramref name="url"/>.
/// Only the rules in the "User-agent: *" section are honoured.
/// </summary>
/// <param name="url">Any URL on the target host; only its host part is used.</param>
/// <returns>
/// A <see cref="RobotResult"/> holding the allowed/disallowed path prefixes, or
/// <c>null</c> when robots.txt cannot be fetched or has no "User-agent: *" section.
/// </returns>
public RobotResult GetRobotsFileFromURL(string url)
{
    UriBuilder ub = new UriBuilder(url);
    string robotsFile;

    // WebClient is IDisposable — the original never disposed it.
    using (var wc = new WebClient())
    {
        try
        {
            robotsFile = wc.DownloadString("http://" + ub.Host + "/robots.txt");
        }
        catch (Exception)
        {
            // No robots.txt found (or host unreachable) — nothing to parse.
            return null;
        }
    }

    string[] lines = robotsFile.Split(new[] { "\r\n", "\r", "\n" }, StringSplitOptions.None);

    // Find the LINE index of the wildcard user-agent section. The original used
    // robotsFile.IndexOf(...) — a CHARACTER index into the whole file — as an
    // index into the lines array, starting the scan at the wrong line entirely.
    int agentLine = Array.FindIndex(lines, l => l.Contains("User-agent: *"));
    if (agentLine < 0)
    {
        // Not allowed to crawl (no wildcard section addressed to us).
        System.Diagnostics.Debug.WriteLine("Not allowed to crawl");
        return null;
    }

    List<string> disallowedLinks = new List<string>();
    List<string> allowedLinks = new List<string>();

    for (int i = agentLine + 1; i < lines.Length; i++)
    {
        // Stop at the next user-agent section: those rules target other bots.
        if (lines[i].Contains("User-agent"))
        {
            break;
        }

        if (lines[i].Contains("Disallow:"))
        {
            string path = ExtractRulePath(lines[i]);
            if (path != null)
            {
                disallowedLinks.Add(path);
            }
        }
        else if (lines[i].Contains("Allow:"))
        {
            string path = ExtractRulePath(lines[i]);
            if (path != null)
            {
                allowedLinks.Add(path);
            }
        }
    }

    return new RobotResult { allowedLinks = allowedLinks, disallowedLinks = disallowedLinks };
}

// Extracts the path portion of an Allow/Disallow rule line, truncating at the
// first '*' wildcard AFTER the path start (the original searched the whole line
// and could compute a negative substring length). Returns null for rules with
// no path at all (e.g. a bare "Disallow:"), where the original threw
// ArgumentOutOfRangeException from Substring(-1).
private static string ExtractRulePath(string line)
{
    int slash = line.IndexOf('/');
    if (slash < 0)
    {
        return null; // Rule has no path — nothing to record.
    }

    int star = line.IndexOf('*', slash);
    return star < 0 ? line.Substring(slash) : line.Substring(slash, star - slash);
}