/// <summary>
/// Displays the robots.txt editor. Reads the site's robots.txt from the content
/// root, determines whether it is a "full" free-text file (first line "#full")
/// or a "partial" structured file (first line "#partial"), and builds the
/// <see cref="Robots"/> view model accordingly.
/// </summary>
/// <returns>
/// The robots editor view on success; an <see cref="EmptyResult"/> (after
/// posting an error via <c>ActionMessage</c>) when the file is missing, empty,
/// or does not start with a recognized type marker.
/// </returns>
public async Task<IActionResult> Index()
{
    AddJS("robotsJS", "/admin/js/robots.js");

    string robotPath = Path.Combine(CurrentHostEnvironment.ContentRootPath, "robots.txt");
    Robots robots = new Robots();

    // Guard clause: bail out early when there is no file at all.
    if (!System.IO.File.Exists(robotPath))
    {
        ActionMessage("Robots.txt file not found", MessageType.Error);
        return new EmptyResult();
    }

    // Read the file exactly once; all parsing below works from this snapshot.
    // (The previous implementation re-opened the file up to three times, which
    // was redundant I/O and racy if the file changed between reads.)
    string contents;
    using (StreamReader sr = new StreamReader(robotPath))
    {
        contents = await sr.ReadToEndAsync();
    }

    if (contents.Length == 0)
    {
        ActionMessage("Robots.txt file is empty", MessageType.Error);
        return new EmptyResult();
    }

    // Parse from the in-memory snapshot; the first line selects the file type.
    using StringReader reader = new StringReader(contents);
    string line1 = reader.ReadLine();

    switch (line1)
    {
        case "":
            // File starts with a blank line, so no type marker is present.
            ActionMessage("no file type of robot.txt", MessageType.Error);
            return new EmptyResult();

        case "#full":
            // Free-text mode: the whole file (minus the marker) is edited as-is.
            robots.FileType = "full";
            robots.FullText = contents.Replace("#full", string.Empty);
            break;

        case "#partial":
        {
            // Structured mode: collect User-Agent / Disallow directives line by line.
            // (The "#partial" marker line was already consumed above.)
            robots.FileType = "partial";
            List<string> disallowedList = new List<string>();

            string line;
            while ((line = reader.ReadLine()) != null)
            {
                // Ordinal comparison: these are machine directives, not user text.
                if (line.StartsWith("Disallow:", StringComparison.Ordinal))
                {
                    disallowedList.Add(line.Replace("Disallow:", string.Empty).Trim());
                }
                else if (line.StartsWith("User-Agent:", StringComparison.Ordinal))
                {
                    robots.UserAgent = line.Replace("User-Agent:", string.Empty).Trim();
                }
            }

            robots.Disallowed = string.Join(",", disallowedList);
            robots.DisallowedList = disallowedList;

            // NOTE(review): instantiating another controller directly bypasses DI;
            // consider injecting the page-list service instead — TODO confirm setup.
            WebBuilderController webBuilderController = new WebBuilderController();
            ViewBag.Pages = await webBuilderController.GetAllPageList(GetSiteID);
            break;
        }

        default:
            ActionMessage("Robots.txt does not match any criteria", MessageType.Error);
            return new EmptyResult();
    }

    return View(robots);
}