/// <summary>
/// Builds a CAML "Batch" element from the items in <paramref name="updateBatch"/>.
/// Items whose CAML conversion throws are skipped and reported through
/// <paramref name="results"/> instead of aborting the whole batch.
/// </summary>
/// <param name="updateBatch">The batch of update items to serialize.</param>
/// <param name="results">
/// Null when every item converted cleanly; otherwise one failed
/// <see cref="UpdateResult"/> per item that threw during conversion.
/// </param>
/// <returns>The Batch node (may have fewer children than the batch has items).</returns>
public static XmlNode GetCamlUpdateBatch(this UpdateBatch updateBatch, out UpdateResults results)
{
    var document = new XmlDocument();
    XmlElement batchElement = document.CreateElement("Batch");
    // "Return" makes the server stop at the first failing item; "Continue" keeps going.
    batchElement.SetAttribute("OnError", updateBatch.ContinueOnError ? "Continue" : "Return");

    results = null;
    foreach (UpdateItem item in updateBatch)
    {
        try
        {
            batchElement.AppendChild(item.GetCamlUpdateItem(document));
        }
        catch (Exception ex)
        {
            // Lazily allocate the failure list so the common success path stays cheap.
            if (results == null)
            {
                results = new UpdateResults();
            }
            results.Add(new UpdateResult()
            {
                Command = item.Command,
                ErrorCode = ex.Message,
                ErrorMessage = ex.Message,
                ItemData = item.ChangedItemData,
                UpdateItemID = item.ID
            });
        }
    }
    return batchElement;
}
/// <summary>
/// Updates the entry matching the (ph, sh) hash pair inside the given archive's
/// hash list, recording the supplied filename / crc when they differ.
/// This is used for generation purposes.
/// </summary>
/// <param name="ph">ph value</param>
/// <param name="sh">sh value</param>
/// <param name="name">equivalent of the hash as a string ("" skips the name update)</param>
/// <param name="crc">crc to record (0 skips the crc update)</param>
/// <param name="archiveName">the name of the archive in which to look / update</param>
/// <returns>NOT_FOUND, UPTODATE, NAME_UPDATED or ARCHIVE_UPDATED</returns>
public UpdateResults UpdateHash(uint ph, uint sh, string name, int crc, string archiveName)
{
    // Pack the two 32-bit hashes into one 64-bit signature key.
    long sig = (((long)ph) << 32) + sh;

    var archive = hashList[archiveName];
    if (!archive.ContainsKey(sig))
    {
        return UpdateResults.NOT_FOUND;
    }

    UpdateResults outcome = UpdateResults.UPTODATE;
    var entry = archive[sig];

    if (name != "" && entry.filename != name)
    {
        // Filename changed: store it and refresh the directory/extension helpers.
        entry.filename = name;
        outcome = UpdateResults.NAME_UPDATED;
        AddDirectory(name);
        AddFileandExtension(name);
        needsSave = true;
    }
    if (archiveName != entry.archiveName)
    {
        // The file switched archives; remember its new home.
        entry.archiveName = archiveName;
        outcome = UpdateResults.ARCHIVE_UPDATED;
        needsSave = true;
    }
    if (crc != 0)
    {
        entry.crc = crc;
        needsSave = true;
    }
    return outcome;
}
/// <summary>
/// Updates the entry matching the (ph, sh) hash pair with the supplied
/// filename / crc. This is used for generation purposes.
/// </summary>
/// <param name="ph">ph value</param>
/// <param name="sh">sh value</param>
/// <param name="name">equivalent of the hash as a string ("" skips the name update)</param>
/// <param name="crc">crc to record (0 skips the crc update)</param>
/// <returns>NOT_FOUND, UPTODATE or NAME_UPDATED</returns>
public UpdateResults UpdateHash(uint ph, uint sh, string name, int crc)
{
    // Pack the two 32-bit hashes into one 64-bit signature key.
    long sig = (((long)ph) << 32) + sh;

    if (!hashList.ContainsKey(sig))
    {
        return UpdateResults.NOT_FOUND;
    }

    UpdateResults outcome = UpdateResults.UPTODATE;
    var entry = hashList[sig];

    if (name != "" && entry.filename != name)
    {
        // Remember the newly discovered filename and refresh the helper indexes.
        entry.filename = name;
        outcome = UpdateResults.NAME_UPDATED;
        AddDirectory(name);
        AddFileandExtension(name);
        needsSave = true;
    }
    if (crc != 0)
    {
        entry.crc = crc;
        needsSave = true;
    }
    return outcome;
}
/// <summary>
/// Downloads the resource manifest (index.json) into <paramref name="targetDir"/>,
/// fetches every manifest file whose MD5 differs from the local copy, and removes
/// local files the manifest no longer lists.
/// </summary>
/// <param name="targetDir">Directory where the manifest and resources are stored.</param>
/// <returns>Counts of updated business-logic, symbology and base-map files.</returns>
public UpdateResults Update(string targetDir)
{
    var results = new UpdateResults();

    // Fetch the repository manifest first; everything below is driven by it.
    Directory.CreateDirectory(targetDir);
    string manifestUrl = string.Format("{0}index.json", ResourceURL);
    string manifestPath = Path.Combine(targetDir, "index.json");
    requestDownload(manifestUrl, manifestPath);

    using (StreamReader file = File.OpenText(manifestPath))
    using (JsonTextReader reader = new JsonTextReader(file))
    {
        JObject manifestJson = (JObject)JToken.ReadFrom(reader);
        Dictionary<string, string> manifest =
            JsonConvert.DeserializeObject<Dictionary<string, string>>(manifestJson.ToString());

        // Download files whose MD5 differs from those on GitHub.
        results.business_logic = downloadManifestFiles(ResourceURL, manifest, @"RaveBusinessLogic\/.*\.xml", targetDir);
        results.symbology_lyrs = downloadManifestFiles(ResourceURL, manifest, @"Symbology\/esri\/.*\.lyr", targetDir);
        results.base_maps_xml = downloadManifestFiles(ResourceURL, manifest, "BaseMaps.xml", targetDir);

        // Cleanup local files that dropped out of the manifest.
        cleanupFiles(manifest, targetDir, "RaveBusinessLogic", "*.xml");
        cleanupFiles(manifest, targetDir, @"Symbology\esri", "*.lyr");
    }
    return results;
}
/// <summary>
/// Handles the score-entry button: validates the text boxes, writes the parsed
/// scores onto the selected matchup's entries, then persists the tournament results.
/// </summary>
private void TVScoreButton_Click(object sender, EventArgs e)
{
    MatchupModel matchup = (MatchupModel)TVScoringRoundListBox.SelectedItem;

    // Reject invalid input outright and reset both score boxes.
    string errorMessage = ValidateScore();
    if (errorMessage.Length > 0)
    {
        MessageBox.Show($"Input error: {errorMessage}");
        TVScoringTeamOneTextBox.Text = "0";
        TVScoringTeamTwoTextBox.Text = "0";
        return;
    }

    // Entry 0 takes the first team's box, entry 1 the second team's box.
    for (int index = 0; index < matchup.Entries.Count; index++)
    {
        if (index == 0 && matchup.Entries[0].TeamCompeting != null)
        {
            if (double.TryParse(TVScoringTeamOneTextBox.Text, out double teamOneScore))
            {
                matchup.Entries[0].Score = teamOneScore;
            }
            else
            {
                MessageBox.Show("Please enter a valid score for the first team.");
                return;
            }
        }
        if (index == 1 && matchup.Entries[1].TeamCompeting != null)
        {
            if (double.TryParse(TVScoringTeamTwoTextBox.Text, out double teamTwoScore))
            {
                matchup.Entries[1].Score = teamTwoScore;
            }
            else
            {
                MessageBox.Show("Please enter a valid score for the second team.");
                return;
            }
        }
    }

    try
    {
        UpdateResults.UpdateTournamentResults(tournament);
    }
    catch (Exception ex)
    {
        MessageBox.Show($"The application encountered the following error: {ex.Message}");
        return;
    }
}
/// <summary>
/// Fills in movie details from the configured data providers: the movie's primary
/// source is queried first, then the remaining sources in order until the movie is
/// fully populated or the provider request limit is reached.
/// </summary>
/// <param name="movie">Movie record updated in place by each provider.</param>
public void Update(DBMovieInfo movie)
{
    List<DBSourceInfo> sources;
    // Snapshot the source list under the lock so later iteration is safe
    // even if detailSources is modified concurrently.
    lock (detailSources) sources = new List<DBSourceInfo>(detailSources);

    // unlock the movie fields for the first iteration
    movie.ProtectExistingValuesFromCopy(false);

    // first update from the primary source of this data
    int providerCount = 0;
    if (movie.PrimarySource != null && movie.PrimarySource.Provider != null)
    {
        UpdateResults success = movie.PrimarySource.Provider.Update(movie);
        logger.Debug("UPDATE: Title='{0}', Provider='{1}', Version={2}, Result={3}",
                     movie.Title, movie.PrimarySource.Provider.Name,
                     movie.PrimarySource.Provider.Version, success.ToString());
        providerCount++;
    }

    // Fall back to the remaining sources, skipping disabled ones and the
    // primary source (already queried above).
    foreach (DBSourceInfo currSource in sources)
    {
        if (movie.IsFullyPopulated())
        {
            logger.Debug("UPDATE: All fields are populated. Done updating '" + movie.Title + "'.");
            break;
        }
        if (currSource.IsDisabled(DataType.DETAILS))
        {
            continue;
        }
        if (currSource == movie.PrimarySource)
        {
            continue;
        }
        providerCount++;
        // A DataProviderRequestLimit of 0 means "no limit".
        if (providerCount <= MovingPicturesCore.Settings.DataProviderRequestLimit ||
            MovingPicturesCore.Settings.DataProviderRequestLimit == 0)
        {
            UpdateResults success = currSource.Provider.Update(movie);
            logger.Debug("UPDATE: Title='{0}', Provider='{1}', Version={2}, Result={3}",
                         movie.Title, currSource.Provider.Name,
                         currSource.Provider.Version, success.ToString());
        }
        else
        {
            // stop update: request limit reached
            break;
        }
        if (MovingPicturesCore.Settings.UseTranslator)
        {
            movie.Translate();
        }
    }
}
/// <summary>
/// Lookup in all the archives if a hash matches; update the first matching entry
/// with the given name / crc. This is used for generation purposes.
/// </summary>
/// <param name="ph">ph value</param>
/// <param name="sh">sh value</param>
/// <param name="name">equivalent of the hash as a string</param>
/// <param name="crc">crc to record (0 to skip)</param>
/// <returns>NOT_FOUND, UPTODATE, NAME_UPDATED or ARCHIVE_UPDATED</returns>
public UpdateResults UpdateHash(uint ph, uint sh, string name, int crc)
{
    // Probe archives in key order; the first archive that knows the hash
    // decides the outcome.
    for (int index = 0; index < hashList.Count; index++)
    {
        UpdateResults outcome = UpdateHash(ph, sh, name, crc, hashList.Keys[index]);
        if (outcome != UpdateResults.NOT_FOUND)
        {
            return outcome;
        }
    }
    return UpdateResults.NOT_FOUND;
}
/// <summary>
/// Initializes the main form: wires the result-update callbacks and restores
/// the last-used connection settings.
/// </summary>
public MainFrm()
{
    InitializeComponent();
    // UpdateResults is a delegate type here; these wrap the callbacks that
    // write command/monitor results to the UI.
    commandUpdateRes = new UpdateResults(this.UpdateCommandResults);
    monitorUpdateRes = new UpdateResults(this.UpdateMonitorResults);
    // Disconnect is only meaningful once a connection exists.
    disconnectBtn.Enabled = false;
    // Restore the previously saved IP/port.
    ipTxt.Text = Properties.Settings.Default.Ip;
    portTxt.Text = Properties.Settings.Default.Port;
}
/// <summary>
/// Parses the "Result" child elements of a CAML response node into an
/// <see cref="UpdateResults"/> collection.
/// </summary>
/// <param name="xmlNode">Response element whose default-namespace "Result" children are read.</param>
/// <returns>One <see cref="UpdateResult"/> per "Result" element (possibly empty).</returns>
public static UpdateResults GetCamlUpdateResults(this XElement xmlNode)
{
    var parsed = new UpdateResults();
    XNamespace ns = xmlNode.GetDefaultNamespace();
    foreach (XElement resultElement in xmlNode.Elements(ns + "Result"))
    {
        parsed.Add(resultElement.GetCamlUpdateResult());
    }
    return parsed;
}
/// <summary>
/// Update the track with the received data: the primary source is queried first,
/// then the remaining track-detail sources in order, up to the configured
/// provider request limit.
/// </summary>
/// <param name="mvTrackInfo">Track record updated in place by each provider.</param>
public void Update(DBTrackInfo mvTrackInfo)
{
    List<DBSourceInfo> sources;
    // Snapshot the source list under the lock so later iteration is safe
    // even if trackDetailSources is modified concurrently.
    lock (trackDetailSources) sources = new List<DBSourceInfo>(trackDetailSources);

    // unlock the mv fields for the first iteration
    mvTrackInfo.ProtectExistingValuesFromCopy(false);

    // first update from the primary source of this data
    int providerCount = 0;
    if (mvTrackInfo.PrimarySource != null && mvTrackInfo.PrimarySource.Provider != null)
    {
        UpdateResults success = mvTrackInfo.PrimarySource.Provider.UpdateTrack(mvTrackInfo);
        logger.Debug("*** UPDATE: Track='{0}', Provider='{1}', Version={2}, Result={3}",
                     mvTrackInfo.Track, mvTrackInfo.PrimarySource.Provider.Name,
                     mvTrackInfo.PrimarySource.Provider.Version, success.ToString());
        providerCount++;
    }

    // Fall back to the remaining sources, skipping disabled ones and the
    // primary source (already queried above).
    foreach (DBSourceInfo currSource in sources)
    {
        if (currSource.IsDisabled(DataType.TRACKDETAIL))
        {
            continue;
        }
        if (currSource == mvTrackInfo.PrimarySource)
        {
            continue;
        }
        providerCount++;
        // A DataProviderRequestLimit of 0 means "no limit".
        if (providerCount <= mvCentralCore.Settings.DataProviderRequestLimit ||
            mvCentralCore.Settings.DataProviderRequestLimit == 0)
        {
            UpdateResults success = currSource.Provider.UpdateTrack(mvTrackInfo);
            logger.Debug("*** UPDATE: Track='{0}', Provider='{1}', Version={2}, Result={3}",
                         mvTrackInfo.Track, currSource.Provider.Name,
                         currSource.Provider.Version, success.ToString());
        }
        else
        {
            // stop update: request limit reached
            break;
        }
        if (mvCentralCore.Settings.UseTranslator)
        {
            mvTrackInfo.Translate();
        }
    }
}
/// <summary>
/// Rebuilds the nutrition facts for the ingredient referenced by <paramref name="update"/>
/// from its imported XML data. Existing facts are deactivated first, then one fact per
/// configured fact type is inserted or updated.
/// </summary>
/// <param name="update">Update record carrying the ingredient id and the XML import guid.</param>
/// <param name="results">Accumulator for fact-update counts and per-fact errors.</param>
/// <exception cref="ApplicationException">Thrown when the XML import ingredient cannot be found.</exception>
private void UpdateIngredientNutritionalInformation(UpdateObject update, ref UpdateResults results)
{
    // need to get the xmlimportitem
    var xmlimport = GetXmlIngredientByGuid(update.xmlingredientguid.ToString());
    if (xmlimport == null)
    {
        throw new ApplicationException("XmlImportIngredient not found");
    }

    // need to blow out the previous facts
    GetDataBase().Execute(" update NutritionFacts set active = 0 where ingredientid =@0", update.ingredientid);

    // translate field to nutrition type; errors on one fact don't stop the rest.
    foreach (var facttype in GetDataBase().Fetch<NutritionFactType>(" select * from NutritionFactTypes"))
    {
        try
        {
            var fact = GetNutritionFact(update.ingredientid, facttype.nutritionfacttypeid);
            if (fact == null)
            {
                fact = new NutritionFact();
                fact.ingredientid = update.ingredientid;
                fact.nutritionfacttypeid = facttype.nutritionfacttypeid;
                // FIX: was "new Guid()", which produces Guid.Empty (all zeros) for
                // every row; new records need unique identifiers.
                fact.ServiceObjectId = Guid.NewGuid();
                fact.DataObjectId = Guid.NewGuid();
            }
            fact.active = true;
            fact.value = GetFactValueFromImport(xmlimport, facttype.nutritionfacttypeid);
            if (fact.IsNew())
            {
                fact.Insert();
            }
            else
            {
                fact.Update();
            }
            results.NutrionalFactsUpdates++;
        }
        catch (Exception e)
        {
            results.Errors.Add("NutrionalFact:" + facttype.name + " for Ingredient:" + update.ingredientid, e.Message);
        }
    }
}
/// <summary>
/// Downloads the update manifest at <paramref name="fileUrl"/> and reports whether
/// any updates are available. Blocks until the asynchronous download (handled by
/// <c>DownloadUpdatesXml</c>) signals completion.
/// </summary>
/// <param name="fileUrl">URL of the updates XML manifest.</param>
/// <param name="updatesData">Files to download, or null when none were parsed.</param>
/// <param name="callback">Progress handler forwarded to the WebClient.</param>
/// <returns>True when at least one update entry was found.</returns>
public static bool UpdatesAvailable(string fileUrl, out List<UpdateData> updatesData, DownloadProgressChangedEventHandler callback)
{
    // FIX: WebClient is IDisposable and was previously leaked; dispose it once
    // the download has signalled completion.
    using (WebClient client = new WebClient())
    {
        // Add a user agent header in case the requested URI contains a query.
        client.Headers.Add("user-agent", "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.2; .NET CLR 1.0.3705;)");
        client.DownloadStringCompleted += DownloadUpdatesXml;
        client.DownloadProgressChanged += callback;

        UpdateResults res = new UpdateResults();
        client.DownloadStringAsync(new Uri(fileUrl), res);
        // Block until DownloadUpdatesXml has finished parsing the manifest.
        res.downloadComplete.WaitOne();

        updatesData = res.updatesData;
        return res.updatesAvailable;
    }
}
/// <summary>
/// Expands a filename pattern containing "[0-9]" placeholders into every possible
/// digit combination, hashes each candidate and tries it against the hash dictionary.
/// </summary>
/// <param name="line">Pattern line; each "[0-9]" marks one decimal digit position.</param>
/// <param name="warhash">Hasher used to compute the (ph, sh) pair per candidate.</param>
/// <returns>Number of newly matched filenames.</returns>
long TreatPatternLine(string line, Hasher warhash)
{
    long matches = 0;
    string[] segments = line.Replace("[0-9]", "|").Split('|');
    int digitCount = segments.Length - 1;

    // Bail out when the pattern would explode combinatorially (9 digits = max int range).
    if (digitCount <= HashCreatorConfig.MaxCombinationPerPattern)
    {
        long combinations = (long)Math.Pow(10, digitCount);
        string zeroPadFormat = new string('0', digitCount);

        // "active" lets an external Stop request end the sweep early.
        for (long value = 0; value < combinations && active; value++)
        {
            string digits = value.ToString(zeroPadFormat);

            // Interleave the fixed segments with one digit each to rebuild the candidate name.
            string candidate = "";
            for (int pos = 0; pos < digitCount; pos++)
            {
                candidate += segments[pos];
                candidate += digits[pos];
            }
            candidate += segments[digitCount];

            warhash.Hash(candidate, 0xDEADBEEF); // Thread-safe ???
            UpdateResults updResult = patternTestHashDic.UpdateHash(warhash.ph, warhash.sh, candidate, 0);
            if (updResult == UpdateResults.NAME_UPDATED || updResult == UpdateResults.ARCHIVE_UPDATED)
            {
                matches++;
            }
        }
    }
    return matches;
}
/// <summary>
/// Applies the posted scores to the matching matchup in the group's rounds and
/// recomputes the tournament results, then redirects back to the scoring page.
/// Any exception is deliberately swallowed so the redirect always happens.
/// </summary>
/// <param name="model">Posted matchup identifiers and team scores.</param>
public ActionResult EditTournamentMatchup(MatchupMvcModel model)
{
    try
    {
        GroupModel group = GlobalConfiguration.Connection.GetGroup(model.GroupId);
        if (ModelState.IsValid)
        {
            // Locate the matchup being edited within the group's rounds
            // (the last match wins, same as the original scan).
            MatchupModel target = new MatchupModel();
            foreach (var round in group.Rounds)
            {
                foreach (var matchup in round)
                {
                    if (matchup.Id == model.MatchupId)
                    {
                        target = matchup;
                    }
                }
            }

            // Entry 0 gets the first team's score, entry 1 the second team's.
            for (int index = 0; index < target.Entries.Count; index++)
            {
                if (index == 0)
                {
                    target.Entries[index].MapOneScore = model.FirstTeamScore;
                }
                else if (index == 1)
                {
                    target.Entries[index].MapOneScore = model.SecondTeamScore;
                }
            }

            UpdateResults.UpdateTournamentResults(group, model.TournamentId);
        }
    }
    catch (Exception)
    {
        // ignored: a failed edit still redirects back to the scoring page
    }
    return RedirectToAction("LeagueScoring", "Tournament",
        new { id = model.TournamentId, divisionId = model.DivisionId });
}
/// <summary>
/// Initializes the main form: sets up logging, wires the result-update callback,
/// populates the command list via reflection, and restores connection settings.
/// </summary>
public MainFrm()
{
    InitializeComponent();
    logger = LogManager.GetLogger(typeof(MainFrm));
    // UpdateResults is a delegate type here; wraps the callback that writes
    // results into the UI text box.
    updateRes = new UpdateResults(this.UpdateResultsTxt);
    // Disconnect is only meaningful once a connection exists.
    disconnectBtn.Enabled = false;
    // GetCommands() returns MethodInfo entries; list each command by name.
    commands = GetCommands();
    foreach (MethodInfo command in commands)
    {
        commandsListBox.Items.Add(command.Name);
    }
    // Restore the previously saved IP/port.
    ipTxt.Text = Properties.Settings.Default.Ip;
    portTxt.Text = Properties.Settings.Default.Port;
    logger.Debug("Logger caricato");
}
/// <summary>
/// Initializes the data-checks form: wires the UI-update delegates, registers the
/// validation-service event handlers, and loads the validation trees from configuration.
/// </summary>
public DataChecksForm()
{
    #region Init Deligate functions
    /**************************************************************************/
    // Delegates that marshal updates to UI controls (progress bar, results,
    // log box, buttons, counters, labels) from non-UI code paths.
    _updateProgressBar = new UpdateProgressBar(updateProgressBarState);
    _updateResults = new UpdateResults(updateResults);
    _updateLogBox = new UpdateLogBox(writeLog);
    _clearBeforeRun = new ClearBeforeRun(clearOnStart);
    _setButton = new SetButtonVisibility(setButtonVisibility);
    _incCounter = new IncCounter(IncreaseCounter);
    _setLabelText = new SetLabelText(SetLabel);
    /**************************************************************************/
    #endregion

    DataChecksModelView = new DataChecksModelView();
    EventsHandlers = new Dictionary<string, Object>();

    // Adding Service Events Handler functions, keyed by event-type constant.
    #region Attaching Services Events
    EventsHandlers.Add(Const.EventsTypes.ParentOutcomeReportedEvent, new EventHandler(instance_OutcomeReported));
    //EventsHandlers.Add(Const.EventsTypes.ChildOutcomeReportedEvent, new EventHandler(instance_OutcomeReported));
    EventsHandlers.Add(Const.EventsTypes.ParentStateChangedEvent, new EventHandler<Edge.Core.Services.ServiceStateChangedEventArgs>(instance_StateChanged));
    EventsHandlers.Add(Const.EventsTypes.ChildStateChangedEvent, new EventHandler<ServiceStateChangedEventArgs>(child_instance_StateChanged));
    EventsHandlers.Add(Const.EventsTypes.ChildServiceRequested, new EventHandler<ServiceRequestedEventArgs>(instance_ChildServiceRequested));
    #endregion

    InitializeComponent();

    // Load Validation Types from configuration
    DataChecksModelView.LoadValidationTypesItems(this.ValidationTypes.Nodes);
    // Load Metrics Validations from configuration
    DataChecksModelView.LoadMetricsValidationsItems(this.MerticsValidations.Nodes);

    this.LogBox.Multiline = true;
}
/// <summary>
/// Rebuilds ingredient cross-references and nutrition facts for every matched
/// menu item. Failures are recorded per item/ingredient so one bad record does
/// not abort the whole run.
/// </summary>
/// <returns>Counters and accumulated errors for the run.</returns>
public UpdateResults UpdateMenu()
{
    // Renamed from "UpdateResults" to avoid shadowing the type name.
    var results = new UpdateResults();
    var matchedMenuItemIds = GetMatchedMenuItemIds(); // scope by menu id

    foreach (var menuid in matchedMenuItemIds)
    {
        try
        {
            var updates = GetDynamicXrefQuery(menuid);
            // Wipe the existing xrefs before re-inserting. NOTE(review): menuid is
            // concatenated into the SQL; it comes from our own query, but confirm
            // it is numeric rather than user-supplied text.
            GetDataBase().Execute("Delete from MenuItemIngredients where menuitemid = " + menuid);
            foreach (var update in updates)
            {
                UpdateIngredientXref(update);
                try
                {
                    UpdateIngredientNutritionalInformation(update, ref results);
                }
                catch (Exception e)
                {
                    results.Errors.Add("Ingredient Nutrition Update: " + update.ingredientid, e.Message);
                }
                results.IngredientsUpdated++;
            }
            results.MenuItemsUpdated++;
        }
        catch (Exception e)
        {
            results.Errors.Add("Menu Item Update: " + menuid, e.Message);
        }
    }
    return results;
}
/// <summary>
/// Serializes the update batch into a CAML "Batch" element. Items that fail to
/// serialize are collected into <paramref name="results"/> instead of aborting
/// the whole batch.
/// </summary>
/// <param name="updateBatch">Batch whose items are converted to CAML.</param>
/// <param name="results">Null on full success; otherwise one failed UpdateResult per bad item.</param>
/// <returns>The Batch node containing one child per successfully converted item.</returns>
public static XmlNode GetCamlUpdateBatch(this UpdateBatch updateBatch, out UpdateResults results)
{
    XmlDocument document = new XmlDocument();
    XmlElement batchNode = document.CreateElement("Batch");
    // "Return" stops server-side processing at the first error; "Continue" keeps going.
    batchNode.SetAttribute("OnError", updateBatch.ContinueOnError ? "Continue" : "Return");

    results = null;
    foreach (UpdateItem item in updateBatch)
    {
        try
        {
            batchNode.AppendChild(item.GetCamlUpdateItem(document));
        }
        catch (Exception ex)
        {
            // Allocate the failure collection only when the first item fails.
            if (results == null)
            {
                results = new UpdateResults();
            }
            results.Add(new UpdateResult()
            {
                Command = item.Command,
                ErrorCode = ex.Message,
                ErrorMessage = ex.Message,
                ItemData = item.ChangedItemData,
                UpdateItemID = item.ID
            });
        }
    }
    return batchNode;
}
/// <summary>
/// Completion handler for the update-manifest download: parses the "Pack" XML,
/// compares versions, and fills the shared <see cref="UpdateResults"/> passed as
/// user state. ALWAYS signals <c>downloadComplete</c> on every exit path, since
/// the caller blocks on it.
/// </summary>
private static void DownloadUpdatesXml(Object sender, DownloadStringCompletedEventArgs e)
{
    UpdateResults results = e.UserState as UpdateResults;
    if (e.Cancelled || e.Error != null)
    {
        results.downloadComplete.Set();
        return;
    }
    string xml = e.Result;
    try
    {
        XmlDocument updateXml = new XmlDocument();
        updateXml.LoadXml(xml);
        var nl = updateXml.GetElementsByTagName("Pack");
        if (nl.Count > 0)
        {
            XmlElement el = nl.Item(0) as XmlElement;
            if (el == null)
            {
                // FIX: this path previously returned WITHOUT signalling, leaving
                // the caller's WaitOne() blocked forever.
                results.downloadComplete.Set();
                return; // false;
            }
            string version = el.Attributes["Version"].Value;
            //HACK: For fixing wrong version number
            if (version == "1.99.9999")
            {
                version = DriverVersion.VERSION;
            }
            // Only collect file entries when the remote version is newer.
            if (String.Compare(version, DriverVersion.VERSION) > 0)
            {
                var files = el.GetElementsByTagName("Files");
                if (files.Count == 0)
                {
                    results.downloadComplete.Set();
                    return;
                }
                el = files.Item(0) as XmlElement;
                if (el == null)
                {
                    results.downloadComplete.Set();
                    return;
                }
                results.updatesData = new List<UpdateData>();
                var fs = el.GetElementsByTagName("File");
                foreach (var node in fs)
                {
                    el = node as XmlElement;
                    if (el == null)
                    {
                        continue;
                    }
                    string src = el.Attributes["Src"].Value;
                    string dst = el.Attributes["Dest"].Value;
                    var ud = new UpdateData(src, dst);
                    results.updatesData.Add(ud);
                }
                if (results.updatesData.Count > 0)
                {
                    results.updatesAvailable = true;
                }
                results.downloadComplete.Set();
                return;
            }
        }
        //BeginInvoke(new InvokeDelegate(ShowUpdateMessage));
    }
    catch (Exception ex)
    {
        // Malformed manifest: log and fall through so the waiter is released.
        Debug.WriteLine(ex.Message);
    }
    results.downloadComplete.Set();
}
/// <summary>
/// Tries every directory/filename/extension combination built from the three input
/// list files against the hash dictionary. Each input file is deduplicated and
/// rewritten in place; matches are appended to a "-found.txt" sidecar file.
/// </summary>
/// <param name="fileNameFile">File containing one candidate filename per line.</param>
/// <param name="dirNameFile">File containing one candidate directory per line.</param>
/// <param name="extNameFile">File containing one candidate extension per line.</param>
/// <returns>Number of newly found filenames.</returns>
public long ParseDirFilenamesExt(string fileNameFile, string dirNameFile, string extNameFile)
{
    Start();
    //hashDic.CreateHelpers();
    long result = 0;
    if (File.Exists(fileNameFile) && File.Exists(dirNameFile))
    {
        Hasher warhash = new Hasher(hasherType);
        UpdateResults found = UpdateResults.NOT_FOUND;

        // fileoutput: results are written next to the filename input file.
        string outputFileRoot = Path.GetDirectoryName(fileNameFile) + "/" + Path.GetFileNameWithoutExtension(fileNameFile);
        FileStream ofsFound = new FileStream(outputFileRoot + "-found.txt", FileMode.Create);
        FileStream ofsNotFound = new FileStream(outputFileRoot + "-notfound.txt", FileMode.Create);
        StreamWriter swf = new StreamWriter(ofsFound);
        StreamWriter swnf = new StreamWriter(ofsNotFound);

        // Read the three input files into sets (normalized: lower-case, forward slashes).
        FileStream fs = new FileStream(fileNameFile, FileMode.Open);
        StreamReader fs_reader = new StreamReader(fs);
        FileStream ds = new FileStream(dirNameFile, FileMode.Open);
        StreamReader ds_reader = new StreamReader(ds);
        FileStream es = new FileStream(extNameFile, FileMode.Open);
        StreamReader es_reader = new StreamReader(es);
        HashSet<string> fileList = new HashSet<string>();
        HashSet<string> dirList = new HashSet<string>();
        HashSet<string> extList = new HashSet<string>();
        string line;
        while ((line = ds_reader.ReadLine()) != null)
        {
            dirList.Add(line.ToLower().Replace('\\', '/').Replace("//", "/"));
        }
        ds_reader.Close();
        ds.Close();
        while ((line = es_reader.ReadLine()) != null)
        {
            extList.Add(line.ToLower().Replace('\\', '/').Replace("//", "/"));
        }
        es_reader.Close();
        es.Close();

        // Strip a trailing extension off each filename when that extension is in
        // the extension list (the combination loop below re-appends extensions).
        string tempExt = "";
        while ((line = fs_reader.ReadLine()) != null)
        {
            tempExt = "";
            if (line.Contains("."))
            {
                tempExt = line.Substring(line.LastIndexOf('.') + 1);
            }
            if (extList.Contains(tempExt))
            {
                line = line.Substring(0, line.LastIndexOf('.'));
            }
            else if (tempExt != "")
            {
                // extList.Add(tempExt);
            }
            fileList.Add(line.ToLower().Replace('\\', '/').Replace("//", "/"));
        }
        fs_reader.Close();
        fs.Close();

        // strip input file from duplicates (rewrite it from the dedup'd set).
        File.Delete(fileNameFile);
        fs = new FileStream(fileNameFile, FileMode.Create);
        StreamWriter fs_writer = new StreamWriter(fs);
        foreach (string file in fileList)
        {
            fs_writer.WriteLine(file);
        }
        fs_writer.Close();
        fs.Close();

        // strip input dir file from duplicates.
        File.Delete(dirNameFile);
        ds = new FileStream(dirNameFile, FileMode.Create);
        StreamWriter ds_writer = new StreamWriter(ds);
        foreach (string dir in dirList)
        {
            ds_writer.WriteLine(dir);
        }
        ds_writer.Close();
        ds.Close();

        // strip input ext file from duplicates.
        File.Delete(extNameFile);
        es = new FileStream(extNameFile, FileMode.Create);
        StreamWriter es_writer = new StreamWriter(es);
        foreach (string ext in extList)
        {
            es_writer.WriteLine(ext);
        }
        es_writer.Close();
        es.Close();

        // Generate the whole dir/filename/extension cross product and test each
        // candidate path against the hash dictionary.
        foreach (string d in dirList)
        {
            foreach (string f in fileList)
            {
                foreach (string e in extList)
                {
                    line = d + '/' + f + "." + e;
                    line = line.Replace("//", "/").Replace("//", "/");
                    // fullFileList.Add(line);
                    warhash.Hash(line, 0xDEADBEEF);
                    found = hashDic.UpdateHash(warhash.ph, warhash.sh, line, 0);
                    if (found == UpdateResults.NAME_UPDATED || found == UpdateResults.ARCHIVE_UPDATED)
                    {
                        result++;
                        swf.WriteLine(line);
                    }
                    else if (found == UpdateResults.NOT_FOUND)
                    {
                        // Not-found candidates are deliberately not written: the
                        // cross product would make the file enormous.
                        //swnf.WriteLine("{0:X8}" + HashDictionary.hashSeparator
                        //    + "{1:X8}" + HashDictionary.hashSeparator
                        //    + "{2}", warhash.ph, warhash.sh, file.Key);
                        //swnf.WriteLine(line);
                    }
                }
            }
        }
        swnf.Close();
        swf.Close();
        ofsFound.Close();
        ofsNotFound.Close();
    }
    return (result);
}
/// <summary>
/// Worker-thread body: pulls directories from the shared pool and tests every
/// directory/filename/extension combination against the hash dictionary,
/// accumulating the number of matches into the shared counter under a lock.
/// </summary>
/// <param name="parameter">A <c>ThreadParam</c> carrying the name lists and output root.</param>
private void calc(object parameter)
{
    Hasher warhash = new Hasher(hasherType);
    HashSet<string> dirList = ((ThreadParam)parameter).dirList;
    HashSet<string> filenameList = ((ThreadParam)parameter).filenameList;
    HashSet<string> extensionList = ((ThreadParam)parameter).extensionList;
    // jstart/jend are unused since the dir-partitioning code below was disabled.
    int jstart = ((ThreadParam)parameter).jstart;
    int jend = ((ThreadParam)parameter).jend;
    string outputFileRoot = ((ThreadParam)parameter).outputFileRoot;
    long filenamesFoundinThread = 0;

    //string[] dirListPart;
    //if (dirList.Count != 0)
    //{
    //    dirListPart = new string[dirList.Count];
    //    dirList.CopyTo(dirListPart);
    //    for (int j = jstart; j < jend; j++)
    //        dirListPart[j] += '/';
    //}
    //else
    //{
    //    dirListPart = new String[1];
    //    dirListPart.SetValue("", 0);
    //    jstart = 0;
    //    jend = 1;
    //}

    // Ensure the inner loop runs at least once even with no extensions.
    // NOTE(review): this mutates the list held by ThreadParam — confirm each
    // thread gets its own copy.
    if (extensionList.Count == 0)
    {
        extensionList.Add("");
    }

    string directoryName;
    // Get the directory name from the pool.
    // Also allows for a cleaner exit if necessary through the Stop method
    // (the pool manager returns null to stop this worker).
    while ((directoryName = GetDirectoryFromPoolManager()) != null)
    {
        foreach (string filename in filenameList)
        {
            foreach (string extension in extensionList)
            {
                string cur_str = directoryName + filename;
                // We may have a problem with files ending with '.' ?
                if (extension.CompareTo("") != 0)
                {
                    cur_str += "." + extension;
                }
                cur_str = cur_str.Replace('\\', '/').ToLower();
                warhash.Hash(cur_str, 0xDEADBEEF);
                // not that sure if UpdateHash is really Thread Safe...
                UpdateResults found = hashDic.UpdateHash(warhash.ph, warhash.sh, cur_str, 0);
                if (found == UpdateResults.NAME_UPDATED || found == UpdateResults.ARCHIVE_UPDATED)
                {
                    filenamesFoundinThread++;
                }
                if (outputFileRoot != null)
                {
                    if (found != UpdateResults.NOT_FOUND)
                    {
                        lock (lock_filefound) // may move this lock to the end of the loop.
                        {
                            foundNames[filename] = true;
                        }
                    }
                }
            }
        }
        // Report progress after each directory is fully processed.
        TriggerFilenameTestEvent(new MYPFilenameTestEventArgs(Event_FilenameTestType.TestRunning, extensionList.Count));
    }

    // Merge this thread's tally into the shared counter under the lock.
    if (filenamesFoundinThread != 0)
    {
        lock (lock_filefound)
        {
            filenamesFoundInTest += filenamesFoundinThread;
        }
    }
}
/// <summary>
/// Tries all filenames (complete path) included in the fullFileNameFile file.
/// The input file is deduplicated and rewritten in place; when the run completes
/// normally, matches and misses are written to "-found.txt" / "-notfound.txt"
/// sidecar files.
/// </summary>
/// <param name="fullFileNameFile">File with one full candidate path per line.</param>
/// <returns>number of newly found filenames</returns>
public long ParseFilenames(string fullFileNameFile)
{
    Start();
    hashDic.CreateHelpers();
    long result = 0;
    if (File.Exists(fullFileNameFile))
    {
        Hasher warhash = new Hasher(hasherType);

        // Read the file into a set (normalized: lower-case, forward slashes).
        FileStream fs = new FileStream(fullFileNameFile, FileMode.Open);
        StreamReader reader = new StreamReader(fs);
        HashSet<string> fileList = new HashSet<string>();
        string line;
        while ((line = reader.ReadLine()) != null)
        {
            fileList.Add(line.ToLower().Replace('\\', '/'));
        }
        reader.Close();
        fs.Close();

        // strip input file from duplicates (rewrite it from the dedup'd set).
        File.Delete(fullFileNameFile);
        fs = new FileStream(fullFileNameFile, FileMode.Create);
        StreamWriter writer = new StreamWriter(fs);
        foreach (string file in fileList)
        {
            writer.WriteLine(file);
        }
        writer.Close();
        fs.Close();

        // Track which candidates matched so they can be split into the two output files.
        foundNames = new Dictionary<string, bool>();
        foreach (string fn in fileList)
        {
            foundNames[fn] = false;
        }

        // Just in case someday we want to multi thread: candidates are pulled
        // through an enumerator guarded by GetFileName_ParseFilenames().
        parseFileList = fileList.GetEnumerator();
        string filename;
        while ((filename = GetFileName_ParseFilenames()) != null)
        {
            warhash.Hash(filename, 0xDEADBEEF);
            UpdateResults found = hashDic.UpdateHash(warhash.ph, warhash.sh, filename, 0);
            if (found == UpdateResults.NAME_UPDATED || found == UpdateResults.ARCHIVE_UPDATED)
            {
                result++;
            }
            if (found != UpdateResults.NOT_FOUND)
            {
                foundNames[filename] = true;
            }
        }

        // Only write the output files when the run wasn't stopped early.
        if (active)
        {
            string outputFileRoot = Path.GetDirectoryName(fullFileNameFile) + "/" + Path.GetFileNameWithoutExtension(fullFileNameFile);
            FileStream ofsFound = new FileStream(outputFileRoot + "-found.txt", FileMode.Create);
            FileStream ofsNotFound = new FileStream(outputFileRoot + "-notfound.txt", FileMode.Create);
            StreamWriter swf = new StreamWriter(ofsFound);
            StreamWriter swnf = new StreamWriter(ofsNotFound);
            foreach (KeyValuePair<string, Boolean> file in foundNames)
            {
                if (file.Value == true)
                {
                    // Matches are written with their ph/sh hash pair.
                    warhash.Hash(file.Key, 0xDEADBEEF);
                    swf.WriteLine("{0:X8}" + HashDictionary.hashSeparator
                        + "{1:X8}" + HashDictionary.hashSeparator
                        + "{2}", warhash.ph, warhash.sh, file.Key);
                }
                else
                {
                    // this is a quick and dirty fix to get some more debug info
                    // to be removed in the future !!!
                    warhash.Hash(file.Key, 0xDEADBEEF);
                    //swnf.WriteLine("{0:X8}" + HashDictionary.hashSeparator
                    //    + "{1:X8}" + HashDictionary.hashSeparator
                    //    + "{2}", warhash.ph, warhash.sh, file.Key);
                    swnf.WriteLine(file.Key);
                }
            }
            swnf.Close();
            swf.Close();
            ofsFound.Close();
            ofsNotFound.Close();
        }
    }
    return (result);
}
/// <summary>
/// Sends a CAML update batch to the list web service and merges client-side
/// serialization failures with the server-reported results. A connection-level
/// failure is mapped to one failed result per batch item instead of throwing.
/// </summary>
/// <param name="listName">Name of the SharePoint list to update.</param>
/// <param name="updateBatch">Non-empty batch of item updates.</param>
/// <returns>The combined per-item results (may be null if nothing was sent and nothing failed).</returns>
public UpdateResults UpdateListItems(string listName, UpdateBatch updateBatch)
{
    if (String.IsNullOrEmpty(listName))
    {
        throw new ArgumentNullException("listName");
    }
    if (updateBatch.Count == 0)
    {
        throw new ArgumentException("Batch contains no updates", "updateBatch");
    }

    // Items that fail CAML conversion land in clientFailures and are
    // excluded from the batch node.
    UpdateResults clientFailures = null;
    var batch = updateBatch.GetCamlUpdateBatch(out clientFailures);
    if (batch.ChildNodes.Count == 0)
    {
        return clientFailures;
    }

    try
    {
        XmlNode response = listService.UpdateListItems(listName, batch);
        var serverResults = response.GetXElement().GetCamlUpdateResults();
        if (serverResults == null)
        {
            return clientFailures;
        }
        if (clientFailures != null)
        {
            foreach (var failure in clientFailures)
            {
                serverResults.Add(failure);
            }
        }
        return serverResults;
    }
    catch (Exception)
    {
        // (ex as System.Net.WebException).Status = Timeout, InternalStatus = RequestFatal,
        // Response = null — connection exception: report every item as failed.
        UpdateResults connectionFailures = new UpdateResults();
        foreach (UpdateItem item in updateBatch)
        {
            connectionFailures.Add(new UpdateResult()
            {
                ErrorCode = UpdateResult.GenericError,
                ErrorMessage = "Connection Timeout",
                Command = item.Command,
                ItemData = item.ChangedItemData,
                UpdateItemID = item.ID
            });
        }
        return connectionFailures;
    }
}
/// <summary>
/// Uploads the pending row changes to SharePoint in segments of 25 items,
/// translating DataRow states into insert/update/delete commands and collecting
/// any per-item failures as sync conflicts. (#UPLOAD 3)
/// </summary>
/// <param name="changes">Changed rows; cleared and refilled per segment during the run.</param>
/// <param name="connection">SharePoint connection used to send each batch.</param>
/// <param name="errors">Collected conflicts, or null when every item succeeded.</param>
public void Update(DataTable changes, SpConnection connection, out Collection<SyncConflict> errors)
{
    errors = new Collection<SyncConflict>();
    if (changes == null)
    {
        throw new ArgumentNullException("changes");
    }
    if (connection == null)
    {
        throw new ArgumentNullException("connection");
    }

    // SharePoint batches are limited to 25 items per request here.
    int _batchSize = 25;
    int segmentsCount = (int)Math.Round(Math.Ceiling((double)changes.Rows.Count / _batchSize), 0);

    if (IgnoreColumnsOnUpdate != null)
    {
        // case to be handled
        // cannot remove Sharepoint ID.
        // cannot remove Primary Key of DataTable?
        foreach (string ignoredColumn in IgnoreColumnsOnUpdate)
        {
            string clientColumn = GetClientColumnFromServerColumn(ignoredColumn);
            if (clientColumn != null && changes.Columns.Contains(clientColumn))
            {
                changes.Columns.Remove(clientColumn);
            }
        }
    }

    // Work from a copy; "changes" itself is reused as the per-segment buffer.
    DataTable changesTotal = changes.Copy();
    for (int i = 0; i < segmentsCount; i++)
    {
        changes.Rows.Clear();
        CopyRows(changesTotal, changes, i * _batchSize, _batchSize);

        //SEND SEGMENT
        UpdateBatch batch = new UpdateBatch();
        string clientIdColumn = GetClientColumnFromServerColumn("ID");
        if (!changes.Columns.Contains(clientIdColumn))
        {
            throw new InvalidOperationException(String.Format(CultureInfo.CurrentCulture, Messages.ColumnIDNotContained, clientIdColumn));
        }

        // Maps each batch item id back to its source row so failures can be
        // attributed after the server responds.
        IDictionary<int, DataRow> IdMapping = new Dictionary<int, DataRow>();
        foreach (DataRow row in changes.Rows)
        {
            UpdateItem u = batch.CreateNewItem();
            switch (row.RowState)
            {
            case DataRowState.Added:
                u.Command = UpdateCommands.Insert;
                break;

            case DataRowState.Deleted:
                u.Command = UpdateCommands.Delete;
                break;

            case DataRowState.Modified:
                u.Command = UpdateCommands.Update;
                break;

            case DataRowState.Unchanged:
                continue;
            }

            // Deleted rows must be restored temporarily so their column values
            // (the item id) can be read; the row is re-deleted below.
            if (u.Command == UpdateCommands.Delete)
            {
                row.RejectChanges();
            }
            // Updates and deletes require the server item id; rows without one are skipped.
            if (u.Command != UpdateCommands.Insert)
            {
                if (!(row[clientIdColumn] is DBNull))
                {
                    u.ListItemID = (int)row[clientIdColumn];
                }
                else
                {
                    continue;
                }
            }
            if (u.Command != UpdateCommands.Delete)
            {
                ListItem item = new ListItem();
                Exception e;
                MapDataRowToListItem(row, item, out e);
                u.ChangedItemData = item;
                if (e != null && SyncTracer.IsErrorEnabled())
                {
                    SyncTracer.Error(e.ToString());
                }
            }
            batch.Add(u);
            IdMapping[u.ID] = row;
            if (u.Command == UpdateCommands.Delete)
            {
                row.Delete();
            }
        }

        if (batch.Count != 0)
        {
            //try
            //{
            UpdateResults results = connection.UpdateListItems(this.ListName, batch);
            // FIX: errors must be handled appropriately
            foreach (UpdateResult r in results)
            {
                if (!r.IsSuccess())
                {
                    if (!IdMapping.ContainsKey(r.UpdateItemID))
                    {
                        throw new InvalidOperationException(
                            String.Format(CultureInfo.CurrentCulture, Messages.NoIDMapping, r.UpdateItemID));
                    }
                    DataRow clientRow = IdMapping[r.UpdateItemID];
                    errors.Add(CreateSyncError(r, clientRow));
                }
            }
            //}
            //catch (Exception ex)
            //{
            //    // usually connection error: a disabled fallback that mapped every
            //    // batch item to a sync error via IdMapping.
            //}
        }
        //END SEND SEGMENT
    }

    // Callers expect null (not an empty collection) when nothing failed.
    if (errors.Count == 0)
    {
        errors = null;
    }
}
/// <summary>
/// Sends a CAML update batch to the list web service. Client-side serialization
/// failures are appended to the server results; a connection-level failure yields
/// one failed result per batch item instead of an exception.
/// </summary>
/// <param name="listName">Name of the SharePoint list to update.</param>
/// <param name="updateBatch">Non-empty batch of item updates.</param>
/// <returns>The combined per-item results (may be null if nothing was sent and nothing failed).</returns>
public UpdateResults UpdateListItems(string listName, UpdateBatch updateBatch)
{
    if (String.IsNullOrEmpty(listName))
    {
        throw new ArgumentNullException("listName");
    }
    if (updateBatch.Count == 0)
    {
        throw new ArgumentException("Batch contains no updates", "updateBatch");
    }

    // Items that failed CAML conversion are reported via localFailures and
    // are not part of the batch node.
    UpdateResults localFailures = null;
    var camlBatch = updateBatch.GetCamlUpdateBatch(out localFailures);
    if (camlBatch.ChildNodes.Count == 0)
    {
        // Nothing serialized successfully; only the local failures remain.
        return localFailures;
    }

    try
    {
        XmlNode response = listService.UpdateListItems(listName, camlBatch);
        var remoteResults = response.GetXElement().GetCamlUpdateResults();
        if (remoteResults == null)
        {
            return localFailures;
        }
        if (localFailures != null)
        {
            foreach (var failed in localFailures)
            {
                remoteResults.Add(failed);
            }
        }
        return remoteResults;
    }
    catch (Exception)
    {
        // (ex as System.Net.WebException).Status = Timeout, InternalStatus = RequestFatal,
        // Response = null — connection exception: flag the entire batch as failed.
        var timeoutResults = new UpdateResults();
        foreach (UpdateItem item in updateBatch)
        {
            timeoutResults.Add(new UpdateResult()
            {
                ErrorCode = UpdateResult.GenericError,
                ErrorMessage = "Connection Timeout",
                Command = item.Command,
                ItemData = item.ChangedItemData,
                UpdateItemID = item.ID
            });
        }
        return timeoutResults;
    }
}
/// <summary>
/// Updates package versions listed in the given manifest files, applying the changes to the
/// default.config / .packageconfig files (via <c>DefaultConfigUpdater</c>) and, when an
/// access token is supplied, to .props files as well.
/// </summary>
/// <param name="manifestFiles">Manifest files to validate and extract build assets from.</param>
/// <param name="defaultConfigFile">Path of the default.config file to load and update.</param>
/// <param name="whitelistedPackages">Patterns of package names allowed to be updated.</param>
/// <param name="packagesToIgnore">Packages excluded from updating; echoed back on the returned results.</param>
/// <param name="accessToken">NuGet feed access token; null means the caller does not want .props file updates.</param>
/// <param name="propsFilesRootDirectory">Root folder to search for .props files; when null/blank one is deduced from <paramref name="defaultConfigFile"/>.</param>
/// <param name="buildFilter">Optional predicate restricting which builds' assets are extracted.</param>
/// <returns>An <see cref="UpdateResults"/> describing the outcome of the run.</returns>
public UpdateResults UpdateVersions(
    IEnumerable<string> manifestFiles,
    string defaultConfigFile,
    IEnumerable<Regex> whitelistedPackages,
    ImmutableHashSet<string>? packagesToIgnore,
    string? accessToken = null,
    string? propsFilesRootDirectory = null,
    Predicate<Build>? buildFilter = null)
{
    List<Asset> assets = new List<Asset>();
    DefaultConfigUpdater configUpdater;

    // Bail out early if the config file cannot be loaded; 'details' carries the reason.
    if (!TryLoadDefaultConfig(defaultConfigFile, out configUpdater, out string details))
    {
        return (new UpdateResults
        {
            OutcomeDetails = details,
            IgnoredNuGets = packagesToIgnore
        });
    }

    // Validate each manifest and collect its assets; any failure aborts the whole run.
    foreach (var manifestFile in manifestFiles)
    {
        if (!TryValidateManifestFile(manifestFile, out details) ||
            !TryExtractManifestAssets(manifestFile, buildFilter, assets, out details))
        {
            return (new UpdateResults
            {
                OutcomeDetails = details,
                IgnoredNuGets = packagesToIgnore
            });
        }
    }

    UpdateResults results = new UpdateResults
    {
        IgnoredNuGets = packagesToIgnore
    };

    Stopwatch overallRunStopWatch = Stopwatch.StartNew();
    // Caps the total run time; the token cancels the parallel asset processing below.
    using CancellationTokenSource source = new CancellationTokenSource(_maxWaitDuration);

    try
    {
        // Process every asset in parallel; ParallelCallback accumulates into 'results'.
        _ = Parallel.ForEach(assets, CreateParallelOptions(source.Token),
            asset => ParallelCallback(asset, whitelistedPackages, packagesToIgnore, configUpdater, results));

        /* Delay saving config file changes until props file updates are successful.
         * If we save the results now and props-file step fails, re-running the application won't attempt to update props files again.
         * A partial success in the app shouldn't hide the errors in the consecutive runs. */

        // Only update props files if user specified an access token. Null token means user doesn't want to update props files.
        bool propsUpdatesEnabled = accessToken != null;

        if (propsUpdatesEnabled)
        {
            // Attempt to find a proper directory to search for props files, if we are not already given one.
            if (string.IsNullOrWhiteSpace(propsFilesRootDirectory))
            {
                if (FindPropsFileRootDirectory(defaultConfigFile, out propsFilesRootDirectory))
                {
                    Trace.WriteLine($"The directory to search for .props files: {propsFilesRootDirectory}");
                }
                else
                {
                    // No directory found: record the failure on the results but keep going,
                    // so the config-save decision below sees a failed props outcome.
                    Trace.WriteLine("Failed to find an appropriate folder to search for .props files.");
                    results.PropsFileUpdateResults = new PropsUpdateResults()
                    {
                        Outcome = false,
                        OutcomeDetails = "Failed to find an appropriate folder to search for .props files."
                    };
                }
            }

            // Update props files if we have a valid directory to search
            if (!string.IsNullOrWhiteSpace(propsFilesRootDirectory))
            {
                SwrFileReader swrFileReader = new SwrFileReader(_maxConcurrentWorkers);
                SwrFile[] swrFiles = swrFileReader.LoadSwrFiles(propsFilesRootDirectory);

                // Deduce the values of the variables referenced by the swr files from the NuGet feed.
                PropsVariableDeducer variableDeducer = new PropsVariableDeducer(InsertionConstants.DefaultNugetFeed, accessToken);
                bool deduceOperationResult = variableDeducer.DeduceVariableValues(configUpdater,
                    results.UpdatedNuGets, swrFiles,
                    out List<PropsFileVariableReference> variables, out string outcomeDetails,
                    _maxDownloadDuration);

                PropsFileUpdater propsFileUpdater = new PropsFileUpdater();
                results.PropsFileUpdateResults = propsFileUpdater.UpdatePropsFiles(variables, propsFilesRootDirectory);

                // A failed deduction marks the whole props-file step as failed,
                // even if some files were written above.
                if (!deduceOperationResult)
                {
                    results.PropsFileUpdateResults.Outcome = false;
                    results.PropsFileUpdateResults.OutcomeDetails += outcomeDetails;
                }
            }
        }
        else
        {
            Trace.WriteLine(".props file updates are skipped since no access token was specified.");
        }

        if (!propsUpdatesEnabled || results.PropsFileUpdateResults!.Outcome == true)
        {
            // Props files were updated successfully. It is safe to save config update results.
            results.FileSaveResults = configUpdater.Save();
        }
        else
        {
            // Props update failed: withhold the config changes so a re-run retries everything.
            Trace.WriteLine("default.config and .packageconfig file updates were skipped, because " +
                "there was an issue updating .props files.");
            results.FileSaveResults = new FileSaveResult[0];
            results.OutcomeDetails += "Failure in updating .props files.";
        }
/// <summary>
/// Parses the default-namespace &lt;Result&gt; child elements of a CAML response
/// element into an <see cref="UpdateResults"/> collection.
/// </summary>
/// <param name="xmlNode">The CAML response element whose "Result" children are converted.</param>
/// <returns>A populated <see cref="UpdateResults"/>; empty when no "Result" elements exist.</returns>
public static UpdateResults GetCamlUpdateResults(this XElement xmlNode)
{
    var parsed = new UpdateResults();
    XNamespace ns = xmlNode.GetDefaultNamespace();
    foreach (XElement resultElement in xmlNode.Elements(ns + "Result"))
    {
        parsed.Add(resultElement.GetCamlUpdateResult());
    }
    return parsed;
}
/// <summary>
/// Scans the given e-mails for resignation forms, upserts each parsed form into the
/// database, and appends one report row per e-mail to <c>UpdateResults</c>; finally
/// sorts the report rows by their last cell.
/// </summary>
/// <param name="emailList">
/// Messages to process. NOTE(review): enumerated twice (Count() then foreach) —
/// assumes the sequence is safely re-enumerable; confirm at the call site.
/// </param>
public void DoUpdate(IEnumerable<MimeMessage> emailList)
{
    var formExtractor = new ResignInfoExtractor();
    _logger.Log($"Parsing - total emails: {emailList.Count()}");

    foreach (var email in emailList)
    {
        var extractResult = formExtractor.ExtractResignForm(email.HtmlBody, out var resign, out var errorMess);
        // The received timestamp is reused in every report row for this message.
        var receivedOn = email.Date.DateTime.ToString(DateStringFormat);

        if (extractResult == ParseResult.Parsed_Info_Error)
        {
            // Form was recognized but its content is invalid: record the error, skip to next email.
            _logger.Log($"Parsing - {email.Subject}: form error -> {errorMess}");
            UpdateResults.Add(MakeRow(email.Subject, receivedOn, string.Empty, errorMess, Code.E.ToString()));
            continue;
        }

        if (extractResult == ParseResult.OK)
        {
            resign.ReceiveDate = email.Date.DateTime;
            var dbResult = Adapter.UpsertRecordIfNewer(resign, out var dbError);
            switch (dbResult)
            {
                case DbResult.Insert:
                case DbResult.Update:
                    // Insert and Update are reported identically, so the branches share one body.
                    Logic.IgnoreList.Add(resign);
                    UpdateResults.Add(MakeRow(email.Subject, receivedOn,
                        resign.ResignDay.ToString(DateStringFormat), dbResult.ToString(), Code.I.ToString()));
                    break;
                case DbResult.Older:
                    // An equal-or-newer record already exists; nothing was changed.
                    UpdateResults.Add(MakeRow(email.Subject, receivedOn,
                        string.Empty, dbResult.ToString(), Code.I.ToString()));
                    break;
                case DbResult.Erorr:
                    // never seen this happens
                    UpdateResults.Add(MakeRow(email.Subject, receivedOn,
                        string.Empty, dbError, Code.E.ToString()));
                    break;
                default:
                    throw new InvalidProgramException();
            }
            _logger.Log($"Parsing - {email.Subject}: OK -> DB: {dbError}");
            continue;
        }

        if (extractResult == ParseResult.Not_Resign_Email)
        {
            _logger.Log($"Parsing - {email.Subject}: probly not resign email");
            UpdateResults.Add(MakeRow(email.Subject, receivedOn, string.Empty, errorMess, Code.I.ToString()));
        }
    }

    // Sort the report rows on their last cell so entries with the same code/message group together.
    UpdateResults.Sort((item1, item2) => string.Compare(item1.Last(), item2.Last(), StringComparison.Ordinal));
}