/// <summary>
/// Advances the targeted ProgressBar, marshalling onto the UI thread when required.
/// </summary>
/// <param name="amount">Optional. Amount to increment bar by. Defaults to 1.</param>
public void IncrementBar(int amount = 1)
{
    // ToolStripProgressBar exposes no InvokeRequired of its own, so the owning
    // strip decides whether we are already on the UI thread.
    if (!Strip.InvokeRequired)
    {
        Progbar.Increment(amount);
        return;
    }

    Strip.BeginInvoke(new Action(() => Progbar.Increment(amount)));
}
/// <summary>
/// Verifies that ToolStripProgressBar.Increment clamps Value to the
/// [Minimum, Maximum] range instead of over/underflowing.
/// </summary>
public void BehaviorIncrement()
{
    var bar = new ToolStripProgressBar();

    bar.Increment(14);
    Assert.AreEqual(14, bar.Value, "B1");

    // Overshooting the default Maximum (100) clamps at the top.
    bar.Increment(104);
    Assert.AreEqual(100, bar.Value, "B2");

    // Undershooting the default Minimum (0) clamps at the bottom.
    bar.Increment(-245);
    Assert.AreEqual(0, bar.Value, "B3");
}
/// ------------------------------------------------------------------------------------
/// <summary>
/// Parses each record entry that needs it and rebuilds the word and phone caches,
/// ticking the supplied progress bar once per record entry.
/// </summary>
/// <param name="progBar">Progress bar to advance per entry; may be null.</param>
public void BuildWordCache(ToolStripProgressBar progBar)
{
    FindWordInitialGeneratedAmbigousSequences();

    var wordCache = new WordCache();
    var entryParser = new RecordEntryParser(_phoneticFieldName, TempRecordCache.Add);

    foreach (var recEntry in this)
    {
        if (progBar != null)
        {
            progBar.Increment(1);
        }

        //// Entries sourced from PAXML or SA data arrive pre-parsed, so only
        //// those flagged NeedsParsing go through the parser. In every case,
        //// each word entry gets its owning record set and is added to the
        //// temporary word cache.
        if (recEntry.NeedsParsing)
        {
            entryParser.ParseEntry(recEntry);
        }

        foreach (var wordEntry in recEntry.WordEntries)
        {
            wordEntry.RecordEntry = recEntry;
            wordCache.Add(wordEntry);
        }
    }

    UnfilteredPhoneCache = GetPhonesFromWordCache(wordCache);
    FindOtherGeneratedAmbiguousSequences();
    SearchEngine.PhoneCache = UnfilteredPhoneCache;
    BuildFilteredWordCache(wordCache);
}
/// <summary>
/// Allocates the per-tile colour, layer and block storage for a wid x hgt map
/// and hands the populated Limits object to Tools.Tool.finishLimits.
/// </summary>
private static void loadMaps()
{
    Limits l = new Limits();
    int x;
    mnuStatus.Text = "Creating Map Storage";

    // NOTE(review): a large negative Increment appears to be used as a "reset
    // to Minimum" (Increment clamps at the bar's bounds) before the allocation
    // loop below — confirm mnuLoad's Minimum/Maximum are configured by the
    // caller so this behaves as intended.
    mnuLoad.Increment(-1 * hgt * wid);

    // Jagged arrays: one column array per x index, each holding hgt cells.
    l.clr = new int[wid][];
    l.lyr = new int[wid][];
    l.map = new Block[wid][];
    for (x = 0; x < wid; x++)
    {
        l.clr[x] = new int[hgt];
        l.lyr[x] = new int[hgt];
        l.map[x] = new Block[hgt];
        // Advance progress by one column's worth of cells.
        mnuLoad.Increment(hgt);
    }

    // Second clamped negative increment drives the bar back down after the
    // allocation pass finishes.
    mnuLoad.Increment(-hgt * wid);
    mnuStatus.Text = "Not Loading";
    Tools.Tool.finishLimits(l);
}
//! Increment the progress bar by one step, wrapping back to zero when full.
/*!
 * \param bar progress bar to advance and repaint
 */
public static void incrementProgressBar(ToolStripProgressBar bar)
{
    bar.Increment(1);
    // Repaint immediately so each step is visible inside long-running loops.
    bar.ProgressBar.Refresh();

    // Once the bar is full, restart from the bottom (marquee-like effect).
    bool isFull = bar.Value >= bar.Maximum;
    if (isFull)
    {
        bar.Value = 0;
    }
}
/// <summary>
/// Animates the given progress bar from 0 to its maximum in fixed steps,
/// blocking the calling thread for ~100 ms per step.
/// </summary>
/// <param name="el">Progress bar to animate.</param>
private void MoveProgressBarr(ToolStripProgressBar el)
{
    el.Value = 0;
    el.Step = 5;
    el.Maximum = 100;

    // Bug fix: the loop previously read the form field toolStripProgressBar1
    // instead of the parameter, so passing any other bar could spin forever
    // (Increment clamps at el.Maximum, which might never equal the other
    // bar's Maximum). Using '<' instead of '!=' also keeps the loop finite
    // should Step ever stop dividing Maximum evenly.
    while (el.Value < el.Maximum)
    {
        el.Increment(el.Step);
        // NOTE(review): Thread.Sleep freezes the UI if this runs on the UI
        // thread — confirm the call site, or move to a timer/async delay.
        System.Threading.Thread.Sleep(100);
    }
}
/// <summary>
/// Advances the wrapped progress bar by the given amount (the value is
/// clamped to the bar's range by ProgressBar.Increment).
/// </summary>
/// <param name="nStepAmt">Number of units to add to the bar's current value.</param>
public void Step(int nStepAmt)
{
    m_progressBar.Increment(nStepAmt);
}
/// <summary>
/// Loads a .mod file from given file and returns a nullable boolean (True, null, False).
/// </summary>
/// <param name="file">.mod file to load.</param>
/// <param name="modCount">REF: Total number of jobs loaded.</param>
/// <param name="progbar">ProgressBar to increment/change during method.</param>
/// <param name="ExternalCall">If true, certain functions are disabled/automated.</param>
/// <returns>True if update is to be done automatically, false if not, and null if user requests to stop loading .mod.</returns>
public static bool? LoadDotMod(string file, ref int modCount, ToolStripProgressBar progbar, bool ExternalCall)
{
    bool AutoUpdate = false;

    // KFreon: Load from file
    using (FileStream fs = new FileStream(file, FileMode.Open, FileAccess.Read))
    {
        // KFreon: Attempt to get version.
        // File layout (new format): [int32 version length][version chars][int32 job count][jobs...]
        // Old format: [int32 job count][jobs...] — detected below via validVersion.
        fs.Seek(0, SeekOrigin.Begin);
        int versionLength = fs.ReadValueS32();
        // Remember where we are so we can rewind if the "version" turns out to
        // actually be the job count (old-format file).
        long countOffset = fs.Seek(0, SeekOrigin.Current); // Just in case
        string version = "";
        int count = -1;
        string ExecutingVersion = null;
        bool validVersion = false;
        if (versionLength > 20)   // KFreon: Version is definitely wrong
        {
            // Non-null signals "version problem" to the prompt logic below.
            ExecutingVersion = "";
        }
        else
        {
            // KFreon: Do version checking — read the version string byte-by-byte.
            for (int i = 0; i < versionLength; i++)
            {
                version += (char)fs.ReadByte();
            }

            // KFreon: Get Executing Version and check validity of read .mod version
            string vers;
            ExecutingVersion = GetVersion(version, out vers, out validVersion);
            version = vers;
            count = fs.ReadValueS32();

            // KFreon: Check if update required
            if (version != ExecutingVersion)
            {
                if (ExternalCall)
                {
                    AutoUpdate = true;
                }
            }
            else  // KFreon: Reset to null to signify success
            {
                ExecutingVersion = null;
            }
        }

        // KFreon: Ask what to do about version
        if (ExecutingVersion != null) //&& !ExternalCall) // Heff: do we want to suppress this for external calls? should they always autoupdate?
        {
            // Seems better to keep it the current way, so that users get prompted if they load old .mods.
            DialogResult dr = MessageBox.Show(Path.GetFileName(file) + " is old and unsupported by this version of ME3Explorer." + Environment.NewLine + "Click Yes to update .mod now, No to continue loading .mod, or Cancel to stop loading .mod", "Ancient .mod detected.", MessageBoxButtons.YesNoCancel, MessageBoxIcon.Warning);
            if (dr == System.Windows.Forms.DialogResult.Cancel)
            {
                // User aborted the load entirely.
                return(null);
            }
            else if (dr == System.Windows.Forms.DialogResult.Yes)
            {
                AutoUpdate = true;
            }
        }
        /*else if (ExecutingVersion != null) // Heff: could use this for always updating if its an external call:
         * AutoUpdate = true;*/

        // KFreon: Reset stream position if necessary — old-format file: the
        // first int32 we read was actually the job count, not a version length.
        if (!validVersion)
        {
            count = versionLength;
            fs.Seek(countOffset, SeekOrigin.Begin);
        }

        // KFreon: Increment progress bar — marshal onto the strip's UI thread.
        if (progbar != null)
        {
            progbar.GetCurrentParent().Invoke(new Action(() => { progbar.Value = 0; progbar.Maximum = count; }));
        }

        // KFreon: Read Data
        DebugOutput.PrintLn("Found " + count + " Jobs", true);
        modCount += count;
        for (int i = 0; i < count; i++)
        {
            // KFreon: Read name — length-prefixed, one byte per char.
            ModMaker.ModJob md = new ModMaker.ModJob();
            int len = fs.ReadValueS32();
            md.Name = "";
            for (int j = 0; j < len; j++)
            {
                md.Name += (char)fs.ReadByte();
            }

            // KFreon: Read script — same length-prefixed encoding.
            len = fs.ReadValueS32();
            md.Script = "";
            for (int j = 0; j < len; j++)
            {
                md.Script += (char)fs.ReadByte();
            }

            // KFreon: Read data — length-prefixed raw byte payload.
            len = fs.ReadValueS32();
            byte[] buff = fs.ReadBytes(len);
            md.data = buff;
            ModMaker.JobList.Add(md);
            DebugOutput.PrintLn("Add Job \"" + md.Name + "\"", true);
            if (progbar != null)
            {
                progbar.GetCurrentParent().Invoke(new Action(() => progbar.Increment(1)));
            }
        }
    }
    return(AutoUpdate);
}
/// <summary>
/// BackgroundWorker progress callback: advances the bound progress bar.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Carries the progress value reported by the worker.</param>
private void M_worker_ProgressChanged(object sender, ProgressChangedEventArgs e)
{
    // NOTE(review): ProgressPercentage is conventionally an absolute percentage,
    // yet Increment adds it to the current value. This is only correct if the
    // worker reports per-step deltas via ReportProgress — confirm the worker's
    // reporting convention.
    m_progress.Increment(e.ProgressPercentage);
}
/// <summary>
/// Copies every active MAX-sourced table listed in [Object] from Oracle into
/// Access: reads each table's ExtractSql, creates the destination table from
/// the reader's schema, then inserts the rows one by one. Progress is shown
/// on the supplied label and progress bar.
/// </summary>
/// <param name="tslStatus">Status label updated with the current table/progress.</param>
/// <param name="tspExtract">Progress bar; Maximum is set to the table count, one tick per table.</param>
public void Extract(ToolStripLabel tslStatus, ToolStripProgressBar tspExtract)
{
    string tableName = null;
    string extractSql = null;
    string keyColumns = null;
    tslStatus.Text = "Extract started.";
    try
    {
        // First pass: count the tables so the progress bar range is known.
        OleDbCommand cmd = new OleDbCommand(
            "SELECT count(*) FROM [Object] " +
            "WHERE ObjectSource='MAX' " +
            "AND ExtractSql IS NOT NULL " +
            "AND Active = True", Connection);
        OleDbDataReader reader = cmd.ExecuteReader();
        if (reader.Read())
        {
            tspExtract.Maximum = int.Parse(reader[0].ToString());
        }
        reader.Close();

        // Second pass: fetch the actual extract definitions.
        cmd = new OleDbCommand(
            "SELECT ObjectName,ExtractSql,KeyColumns FROM [Object] " +
            "WHERE ObjectSource='MAX' " +
            "AND ExtractSql IS NOT NULL " +
            "AND Active = True " +
            "ORDER BY ObjectName", Connection);
        reader = cmd.ExecuteReader();
        // Iterate through the DataReader and display row
        while (reader.Read())
        {
            tspExtract.Increment(1);
            tableName = reader[0].ToString();
            extractSql = reader[1].ToString();
            keyColumns = reader[2].ToString();
            tslStatus.Text = String.Format("Extracting table {0} ({1} of {2})", tableName, tspExtract.Value, tspExtract.Maximum);
            // Substitute site/root placeholders embedded in the stored SQL.
            extractSql = extractSql.Replace("{SITECODE}", Config.Instance.SiteCode);
            extractSql = extractSql.Replace("{ROOTCODE}", Config.Instance.RootCode);
            OracleCommand oraCmd = new OracleCommand(extractSql, OracleDatalayer.Instance.Connection);
            OracleDataReader oraRdr = oraCmd.ExecuteReader();
            DataTable schemaTable = oraRdr.GetSchemaTable();
            CreateTableFromReader(tableName, schemaTable, keyColumns);  // Create the new table, and its primary key
            CreateInsertSql(tableName, schemaTable);                    // Prepare the INSERT SQL that will be used for each row
            while (oraRdr.Read())
            {
                // Build one quoted-literal value per column for the INSERT.
                string[] columns = new string[schemaTable.Rows.Count];
                int i = 0;
                foreach (DataRow dr in schemaTable.Rows)
                {
                    if (oraRdr.IsDBNull(i))
                    {
                        columns[i] = "NULL";
                    }
                    else
                    {
                        // SECURITY(review): values are embedded into the SQL
                        // text with only single-quote doubling as escaping.
                        // Parameterized commands would be safer and faster;
                        // flagged rather than changed to preserve behavior.
                        string sqlSafeValue = oraRdr.GetValue(i).ToString();
                        sqlSafeValue = sqlSafeValue.Replace("'", "''");
                        columns[i] = string.Format("'{0}'", sqlSafeValue);
                    }
                    i++;
                }
                using (OleDbCommand cmd2 =
                       AccessDatalayer.Instance.Connection.CreateCommand())
                {
                    try
                    {
                        cmd2.CommandType = CommandType.Text;
                        cmd2.CommandText = String.Format(_insertSql, String.Join(",", columns));
                        cmd2.ExecuteNonQuery();
                    }
                    catch (Exception exc)
                    {
                        // Per-row failures are reported but do not stop the extract.
                        MessageBox.Show(exc.Message + "\n" + cmd2.CommandText, "Error Inserting to Access", MessageBoxButtons.OK, MessageBoxIcon.Error);
                    }
                }
            }
            oraRdr.Close();
            oraRdr.Dispose();
            oraCmd.Dispose();
        }
        reader.Close();
        cmd.Dispose();
        tslStatus.Text = "Extract complete.";
    }
    catch (Exception exc)
    {
        MessageBox.Show(exc.Message + "\n" + extractSql, "Error Extracting Data", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
/// <summary>
/// Heuristically scans an .xlsx file column-by-column for groups ("combos") of
/// fund/sub-account names: all-caps cells become candidate issuer names, cells
/// containing known fund vocabulary become sub-account values attributed to the
/// most recent issuer, and a decaying "probability mass" lets a few trailing
/// cells join the current combo. Combos with enough keys/values are collected,
/// written to "&lt;file&gt;_result.csv", and returned.
/// </summary>
/// <param name="file">Path to the .xlsx workbook to scan.</param>
/// <returns>List of issuer → "row,col,normalizedValue" groupings that passed the size thresholds.</returns>
private List<Dictionary<string, List<string>>> ParseSubAccFromFile(string file)
{
    // Normalization regexes: strip non-alphanumerics, parenthesized text,
    // filler words and runs of spaces (input is lower-cased first).
    var removeSpecCharRegexp = new Regex(@"[^a-z0-9 ]", RegexOptions.Compiled);
    var endOfWord = @"\b";
    var startOfWord = @"\b";
    var wordsToRemove = "fund,class,the".Split(',')
        .Select(x => new Regex(startOfWord + x + endOfWord, RegexOptions.Compiled))
        .ToList();
    var removeDuplicateCharRegexp = new Regex(@"[ ]{2,}", RegexOptions.Compiled);
    var removeValInBktRegexp = new Regex(@"\(.*\)", RegexOptions.Compiled);
    // Vocabulary whose presence marks a cell as a likely fund/sub-account name.
    string[] tikerMarkWordsArr = { "GROWTH", "CAPITAL", "INCOME", "INVESTMENT", "ALLOCATION", "VALUE", "EQUITY", "INDEX", "INTERNATIONAL", "TOTAL", "SMALL", "INSTITUTIONAL", "INFLATION", "MARKET", "SELECT", "RETURN", "FINANCIAL", "ASSET", "CORPORATE", "FUND", "FUNDS", "ENHANCED", "CONVERTIBLE", "RETIREMENT", "MODERATE", "BOND", "SHORT", "INVESTORS", "STOCK", "HEALTH", "BALANCED", "GLOBAL", "INSIGHTS", "GOVERNMENT", "EMERGING", "WORLD", "HEALTHCARE", "TREASURY", "INFO", "REAL", "RESERVES", "MARKETS", "ENERGY", "TECHNOLOGY", "CASH", "RESOURCES", "COMPANY", "LONG", "TERM", "APPRECIATION", "HIGH", "LARGE", "MID", "Portfolio" };
    var tickerMarkWordsArrRegex = tikerMarkWordsArr
        .Select(x => new Regex(startOfWord + x.ToLower() + endOfWord, RegexOptions.Compiled))
        .ToList();
    var resultIssuers = new List<Dictionary<string, List<string>>>();
    // Per-combo scan state, reset whenever the current combo is flushed.
    var comboList = new Dictionary<string, List<string>>();
    var probabilityMass = 0;
    string detectedIssuer = null;
    string possibleIsser = null;
    using (var stream = new FileStream(file, FileMode.Open, FileAccess.Read))
    {
        var dt = XlsxToCsvConverter.XlsxToDataTable(stream, false, 0, 0);
        // NOTE(review): Maximum is one column's row count but Increment fires
        // for every (row, column) pair, so the bar saturates after the first
        // column (Increment clamps at Maximum) — confirm whether
        // rows * columns was intended.
        prgLine.Maximum = dt.Rows.Count;
        for (int j = 0; j < dt.Columns.Count; j++)
        {
            for (int i = 0; i < dt.Rows.Count; i++)
            {
                prgLine.Increment(1);
                Application.DoEvents();
                var row = dt.Rows[i][j];
                string rawVal = row.ToString().Trim();
                string val = rawVal.ToLower();
                // Ticker-vocabulary hits are detected on the raw lower-cased
                // text, before normalization strips anything.
                var tickerMarksInVal = tickerMarkWordsArrRegex.Where(regex => regex.IsMatch(val)).ToList();
                val = removeValInBktRegexp.Replace(val, "");
                val = removeSpecCharRegexp.Replace(val, "");
                val = wordsToRemove.Aggregate(val, (current, regex) => regex.Replace(current, ""));
                val = removeDuplicateCharRegexp.Replace(val, "").Trim();
                if (string.IsNullOrEmpty(val))
                {
                    // Blank cells neither extend nor break the current combo.
                    continue;
                }
                if (val.Length <= 100)
                {
                    // All-caps cell (length >= 4, has letters, no lowercase):
                    // treat as a candidate issuer heading.
                    if (rawVal.Length >= 4 && rawVal.Any(char.IsLetter) && rawVal.All(x => !char.IsLetter(x) || char.IsUpper(x)))
                    {
                        possibleIsser = detectedIssuer ?? rawVal;
                        probabilityMass = probabilityMass > 1 ? probabilityMass : 1;
                    }
                    // Fund-vocabulary cell: record it under the current issuer
                    // and boost confidence.
                    if (tickerMarksInVal.Count > 0)
                    {
                        possibleIsser = detectedIssuer ?? possibleIsser ?? "";
                        probabilityMass = probabilityMass > 2 ? probabilityMass : 2;
                        comboList.AddIfNotExist(possibleIsser, new List<string>());
                        comboList[possibleIsser].Add(string.Format("{0},{1},{2}", i, j, val));
                        continue;
                    }
                    // Residual confidence: a few non-matching, non-numeric
                    // cells after a hit still join the combo as it decays.
                    if (probabilityMass > 0)
                    {
                        probabilityMass--;
                        long longVal;
                        if (val.Length > 5 && !long.TryParse(val, out longVal))
                        {
                            comboList.AddIfNotExist(detectedIssuer ?? possibleIsser, new List<string>());
                            comboList[detectedIssuer ?? possibleIsser].Add(string.Format("{0},{1},{2}", i, j, val));
                        }
                        continue;
                    }
                }
                // Confidence exhausted (or over-long cell): flush the combo if
                // it is big enough, then reset the scan state.
                if (comboList.Count >= 2 || comboList.Values.Sum(x => x.Count) >= 5)
                {
                    resultIssuers.Add(comboList);
                }
                comboList = new Dictionary<string, List<string>>();
                detectedIssuer = null;
                possibleIsser = null;
                probabilityMass = 0;
            }
            // startNewColumn — flush whatever combo was in progress at column end.
            if (comboList.Count >= 2 || comboList.Values.Sum(x => x.Count) >= 5)
            {
                resultIssuers.Add(comboList);
            }
            comboList = new Dictionary<string, List<string>>();
            detectedIssuer = null;
            possibleIsser = null;
            probabilityMass = 0;
        }
        //finalize — flush the last combo after the final column.
        if (comboList.Count >= 2 || comboList.Values.Sum(x => x.Count) >= 5)
        {
            resultIssuers.Add(comboList);
        }
    }
    // Write results, largest combos first; each normalized value is emitted at
    // most once across all combos.
    using (var fileWriter = new StreamWriter(file + "_result.csv"))
    {
        resultIssuers = resultIssuers.OrderByDescending(x => x.Keys.Count).ToList();
        var allreadyAddedSubAcc = new HashSet<string>();
        foreach (var dict in resultIssuers)
        {
            // Keep only values whose normalized text (index 2 of "i,j,val")
            // has not been written for an earlier combo.
            var uniqList = dict.Values.SelectMany(x => x).Where(value => allreadyAddedSubAcc.Add(value.Split(',')[2])).ToList();
            if (uniqList.Count() < 5)
            {
                continue;
            }
            fileWriter.WriteLine("New Combo. KeyCount {0}, ValueCount {1}", dict.Keys.Count, dict.Values.Sum(x => x.Count));
            foreach (var issuerToValListPair in dict)
            {
                foreach (var value in issuerToValListPair.Value)
                {
                    fileWriter.WriteLine("{0},{1}", issuerToValListPair.Key, value);
                }
            }
            fileWriter.WriteLine();
        }
    }
    return(resultIssuers);
}