public static IndexSettings Deserialize(string xml)
{
    // Rebuild the IndexSettings object from its XML representation
    var serializer = new XmlSerializer(typeof(IndexSettings));
    using (var reader = new StringReader(xml))
    {
        return (IndexSettings)serializer.Deserialize(reader);
    }
}
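For reference, a call might look like the following. The element names in the sample XML are assumptions based on the IndexSettings properties used further down; XmlSerializer maps elements to property names by default.

var xml = @"<IndexSettings>
  <DefaultIncludedPath>*.cs</DefaultIncludedPath>
  <DefaultExcludedPath>\bin\</DefaultExcludedPath>
</IndexSettings>";
IndexSettings settings = Deserialize(xml);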
private void UpdateSolrIndexForProject(IndexSettings settings, ISolrOperations<CodeDocument> solr, Project proj)
{
    List<string> alldocs = null;

    // Find out if the directory exists before doing anything to the index
    if (!Directory.Exists(proj.Path))
    {
        Console.WriteLine(DateTime.Now.ToString() + ": Directory for project " + proj.ProjectName + " did not exist, skipping");
        return;
    }

    // Find all of the files
    using (var csw = new ConsoleStopWatch(""))
    {
        alldocs = GetDocsForProject(proj, settings.DefaultIncludedPath, settings.DefaultExcludedPath);
        csw.Name = "Finding " + alldocs.Count.ToString() + " files for project " + proj.ProjectName;
    }

    // Clear out any existing documents for this project
    using (var csw = new ConsoleStopWatch("Deleting all solr docs for project " + proj.ProjectName))
    {
        solr.Delete(new SolrQuery("project:\"" + proj.ProjectName + "\""));
        solr.Commit();
    }

    // Break the file list out into chunks of DOCS_PER_POST for speed. Posting one
    // document at a time is too slow; posting too many at once can cause Solr
    // memory and thread issues.
    var fileChunks = Chunkify(alldocs.ToArray(), DOCS_PER_POST);

    using (var csw = new ConsoleStopWatch("Adding the documents to solr for project " + proj.ProjectName))
    {
        // Convert each chunk to solr documents and submit it
        for (int i = 0; i < fileChunks.Length; i++)
        {
            var fileChunk = fileChunks[i];
            var codedocs = MakeDocument(fileChunk, proj);

            // Leverage the CommitWithin option of SolrNet so that we do not
            // need to pay the cost of a commit for each group.
            solr.AddRange(codedocs, new AddParameters { CommitWithin = 10000 });
        }

        solr.Optimize();
    }
}
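Chunkify is not shown here. A minimal sketch, assuming it does nothing more than split the array into chunks of at most chunkSize entries; it would live in the same class as the method above.

private static string[][] Chunkify(string[] items, int chunkSize)
{
    var chunks = new List<string[]>();
    for (int i = 0; i < items.Length; i += chunkSize)
    {
        // Copy the next slice; the final chunk may be smaller than chunkSize
        int count = Math.Min(chunkSize, items.Length - i);
        var chunk = new string[count];
        Array.Copy(items, i, chunk, 0, count);
        chunks.Add(chunk);
    }
    return chunks.ToArray();
}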
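ConsoleStopWatch is also assumed rather than shown: a small IDisposable wrapper around Stopwatch that logs the elapsed time when its using block ends. The settable Name property lets a caller fill in details (such as the file count) after the timed work has run, which is exactly what the first using block above does. A sketch:

using System;
using System.Diagnostics;

public class ConsoleStopWatch : IDisposable
{
    private readonly Stopwatch _watch = Stopwatch.StartNew();

    public ConsoleStopWatch(string name)
    {
        Name = name;
    }

    public string Name { get; set; }

    public void Dispose()
    {
        // Stop timing and report how long the enclosed block took
        _watch.Stop();
        Console.WriteLine(DateTime.Now.ToString() + ": " + Name + " took " + _watch.ElapsedMilliseconds + "ms");
    }
}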