/// <summary>
/// Adds the existing unread states for the specified tab and list of workspaces
/// to the specified array list.
/// </summary>
private void FillStatesForTab( IList states, IResourceList workspaces, string tab )
{
    if ( tab == _tabProvider.GetDefaultTab() )
    {
        states.Add( _defaultUnreadState );
    }

    IntHashTable tabHash = (IntHashTable) _unreadStateTabMap [tab];
    if ( tabHash == null )
    {
        return;
    }

    // For non-default tabs, the state not tied to any workspace is stored under key 0.
    if ( tab != _tabProvider.GetDefaultTab() )
    {
        UnreadState noWorkspaceState = (UnreadState) tabHash [0];
        if ( noWorkspaceState != null )
        {
            states.Add( noWorkspaceState );
        }
    }

    // Add the per-workspace states which exist for this tab.
    foreach( IResource ws in workspaces )
    {
        UnreadState wsState = (UnreadState) tabHash [ws.Id];
        if ( wsState != null )
        {
            states.Add( wsState );
        }
    }
}
/// <summary>
/// Initializes the column for the specified property ID, creating the image
/// list (with the icon manager's color depth) and the value-to-icon map.
/// </summary>
public ImageListColumn( int propID )
{
    _propID = propID;
    _valuesForIcons = new IntHashTable();
    _imageList = new ImageList();
    _imageList.ColorDepth = ICore.Instance.ResourceIconManager.IconColorDepth;
}
/// <summary>
/// Reads the linked chain of module name records from the database stream and
/// fills _moduleHash with (record offset -> module name) entries.
/// Throws MirandaDatabaseCorruptedException on an invalid record signature.
/// </summary>
private void LoadModules()
{
    _moduleHash = new IntHashTable();
    _dbStream.Position = _ofsFirstModuleName;
    while (true)
    {
        // The record's own offset is used as the hash key.
        int ofs = (int)_dbStream.Position;
        int signature = _dbReader.ReadInt32();
        if (signature != SIGNATURE_MODULE)
        {
            throw new MirandaDatabaseCorruptedException("Database corrupted: invalid module signature");
        }
        int ofsNext = _dbReader.ReadInt32();
        int cbName = _dbReader.ReadByte();
        if (cbName > 0)
        {
            char[] nameChars = _dbReader.ReadChars(cbName);
            _moduleHash [ofs] = new string( nameChars );
        }
        // An offset of 0 marks the end of the linked list of modules.
        if (ofsNext == 0)
        {
            break;
        }
        _dbStream.Position = ofsNext;
    }
}
// Loads the managed functions (and their line information) contained in a
// single DBI module stream and appends them to funcList.
//   bits        - reader positioned over the module stream contents
//   info        - module descriptor carrying the section sizes (cbSyms etc.)
//   names       - name stream hash used by LoadManagedLines to resolve file names
//   funcList    - receives the loaded PdbFunction instances
//   readStrings - passed through to PdbFunction.LoadManagedFunctions
// Throws PdbDebugException if the module stream signature is not 4.
static void LoadFuncsFromDbiModule(BitAccess bits, DbiModuleInfo info, IntHashTable names, ArrayList funcList, bool readStrings)
{
    PdbFunction[] funcs = null;
    bits.Position = 0;
    int sig;
    bits.ReadInt32(out sig);
    if (sig != 4)
    {
        throw new PdbDebugException("Invalid signature. (sig={0})", sig);
    }
    bits.Position = 4;
    // Console.WriteLine("{0}:", info.moduleName);
    funcs = PdbFunction.LoadManagedFunctions(info.moduleName, bits, (uint)info.cbSyms, readStrings);
    if (funcs != null)
    {
        // Line information follows the symbols and the old-style line section.
        bits.Position = info.cbSyms + info.cbOldLines;
        LoadManagedLines(funcs, names, bits, (uint)(info.cbSyms + info.cbOldLines + info.cbLines));
        for (int i = 0; i < funcs.Length; i++)
        {
            funcList.Add(funcs[i]);
        }
    }
}
/// <summary>
/// Switches the state of unread counters to the state associated with the
/// specified tab and workspace, creating a new state if necessary.
/// </summary>
public UnreadState SetUnreadState( string activeTab, IResource activeWorkspace )
{
    if ( activeTab == _tabProvider.GetDefaultTab() && activeWorkspace == null )
    {
        _curUnreadState = _defaultUnreadState;
        return _curUnreadState;
    }

    IntHashTable tabHash = (IntHashTable) _unreadStateTabMap [activeTab];
    if ( tabHash == null )
    {
        tabHash = new IntHashTable();
        _unreadStateTabMap [activeTab] = tabHash;
    }

    // Key 0 represents "no workspace" for the tab.
    int workspaceKey = (activeWorkspace == null) ? 0 : activeWorkspace.Id;
    UnreadState unreadState = (UnreadState) tabHash [workspaceKey];
    if ( unreadState == null )
    {
        unreadState = new UnreadState( this, activeTab, activeWorkspace );
        tabHash [workspaceKey] = unreadState;
    }
    _curUnreadState = unreadState;
    return _curUnreadState;
}
// Reads all functions from a PDB file: parses the MSF directory, the name
// stream, the DBI stream and each module's symbol stream, and returns the
// collected PdbFunction array (with tokens remapped when a remap table is
// present in the debug header). Throws PdbException if the name stream is missing.
public static PdbFunction[] LoadFunctions(Stream read, BitAccess bits, bool readAllStrings)
{
    PdbFileHeader head = new PdbFileHeader(read, bits);
    PdbReader reader = new PdbReader(read, head.pageSize);
    MsfDirectory dir = new MsfDirectory(reader, head, bits);
    DbiModuleInfo[] modules = null;
    DbiDbgHdr header;

    // Stream 1 holds the PDB info from which the name stream index is read.
    dir.streams[1].Read(reader, bits);
    int nameStream = LoadPdbStream(bits);
    if (nameStream <= 0)
    {
        throw new PdbException("No `name' stream");
    }
    dir.streams[nameStream].Read(reader, bits);
    IntHashTable names = LoadNameStream(bits);

    // Stream 3 holds the DBI data: the module table and the debug header.
    dir.streams[3].Read(reader, bits);
    LoadDbiStream(bits, out modules, out header, readAllStrings);

    ArrayList funcList = new ArrayList();
    if (modules != null)
    {
        for (int m = 0; m < modules.Length; m++)
        {
            if (modules[m].stream > 0)
            {
                dir.streams[modules[m].stream].Read(reader, bits);
                LoadFuncsFromDbiModule(bits, modules[m], names, funcList, readAllStrings);
            }
        }
    }
    PdbFunction[] funcs = (PdbFunction[])funcList.ToArray(typeof(PdbFunction));

    // After reading the functions, apply the token remapping table if it exists.
    if (header.snTokenRidMap != 0 && header.snTokenRidMap != 0xffff)
    {
        dir.streams[header.snTokenRidMap].Read(reader, bits);
        uint[] ridMap = new uint [dir.streams[header.snTokenRidMap].Length / 4];
        bits.ReadUInt32(ridMap);
        foreach (PdbFunction func in funcs)
        {
            // Keep the MethodDef table tag (0x06), replace the RID part.
            func.token = 0x06000000 | ridMap[func.token & 0xffffff];
        }
    }
    // Array.Sort(funcs, PdbFunction.byAddress);
    //Array.Sort(funcs, PdbFunction.byToken);
    return(funcs);
}
public void ShouldAddIntToTable()
{
    // Arrange
    var table = new IntHashTable();

    // Act
    table.Add(1);

    // Assert: the added value must be reported as contained.
    Assert.True(table.Contains(1));
}
/// <summary>
/// Adjusts the unread counter of a resource in the state registered for the
/// given workspace, if both the tab map and the state exist.
/// </summary>
private void AdjustCounterInState( IntHashTable tabMap, int workspaceId, IResource resource, int delta )
{
    if ( tabMap == null )
    {
        return;
    }
    UnreadState state = (UnreadState) tabMap [workspaceId];
    if ( state != null )
    {
        AdjustUnreadCount( resource, delta, state );
    }
}
// Parses the PDB names stream: validates the signature and version, then reads
// the hash table mapping name indices (byte offsets into the string buffer) to
// the strings themselves. Returns the (ni -> name) table.
// Throws PdbDebugException on an unsupported signature or version.
static IntHashTable LoadNameStream(BitAccess bits)
{
    IntHashTable ht = new IntHashTable();

    uint sig;
    int ver;
    bits.ReadUInt32(out sig);   //  0..3  Signature
    bits.ReadInt32(out ver);    //  4..7  Version

    // Read (or skip) string buffer.
    int buf;
    bits.ReadInt32(out buf);    //  8..11 Bytes of Strings

    if (sig != 0xeffeeffe || ver != 1)
    {
        throw new PdbDebugException("Unsupported Name Stream version. " + "(sig={0:x8}, ver={1})", sig, ver);
    }
    int beg = bits.Position;
    int nxt = bits.Position + buf;
    bits.Position = nxt;

    // Read hash table.
    int siz;
    bits.ReadInt32(out siz);    // n+0..3 Number of hash buckets.
    nxt = bits.Position;
    for (int i = 0; i < siz; i++)
    {
        int ni;
        string name;
        bits.ReadInt32(out ni);
        if (ni != 0)
        {
            // ni is an offset into the string buffer; read the string there
            // and restore the position afterwards.
            int saved = bits.Position;
            bits.Position = beg + ni;
            bits.ReadCString(out name);
            bits.Position = saved;
            ht.Add(ni, name);
        }
    }
    bits.Position = nxt;
    return(ht);
}
/// <summary>
/// Returns the value of the specified property stored for the resource,
/// or null if no value has been set.
/// </summary>
public object GetPropValue( IResource res, int propID )
{
    lock( _propHashTable )
    {
        IntHashTable valuesByResource = (IntHashTable) _propHashTable [propID];
        return (valuesByResource == null) ? null : valuesByResource [res.Id];
    }
}
/// <summary>
/// When a resource matching a view enters or leaves a workspace, update unread counter for the
/// view in that workspace.
/// </summary>
/// <param name="res">The resource entering or leaving the workspace.</param>
/// <param name="viewResource">The view for which the counter should be updated.</param>
/// <param name="workspaceId">The ID of the workspace which the resource enters or leaves.</param>
/// <param name="delta">The value by which the counter is changed (1 or -1).</param>
internal void AdjustViewWorkspaceCounter( IResource res, IResource viewResource, int workspaceId, int delta )
{
    // The default tab always receives the adjustment.
    IntHashTable defaultTabMap = (IntHashTable) _unreadStateTabMap [_tabProvider.GetDefaultTab()];
    AdjustCounterInState( defaultTabMap, workspaceId, viewResource, delta );

    // The tab specific to the resource receives it as well, if there is one.
    string resourceTab = _tabProvider.GetResourceTab( res );
    if ( resourceTab == null )
    {
        return;
    }
    IntHashTable specificTabMap = (IntHashTable) _unreadStateTabMap [resourceTab];
    AdjustCounterInState( specificTabMap, workspaceId, viewResource, delta );
}
/// <summary>
/// Copies the entries of the valid-entries hash table into the result array of
/// the query result; the result stays null when the table is empty.
/// </summary>
private static void FillResult( QueryResult qResult, IntHashTable validEntries )
{
    qResult.Result = null;
    if ( validEntries.Count == 0 )
    {
        return;
    }
    qResult.Result = new Entry[ validEntries.Count ];
    int index = 0;
    foreach( IntHashTable.Entry e in validEntries )
    {
        qResult.Result[ index++ ] = (Entry) e.Value;
    }
}
/// <summary>
/// Invalidates the cached unread counter of the resource in the default state
/// and in every per-tab, per-workspace state, then notifies the current state.
/// </summary>
public void InvalidateUnreadCounter( IResource res )
{
    _defaultUnreadState.InvalidateCounter( res );
    foreach( DictionaryEntry tabEntry in _unreadStateTabMap )
    {
        IntHashTable workspaceStates = (IntHashTable) tabEntry.Value;
        foreach( IntHashTable.Entry wsEntry in workspaceStates )
        {
            ((UnreadState) wsEntry.Value).InvalidateCounter( res );
        }
    }
    _curUnreadState.OnUnreadCountChanged( res );
}
/// <summary>
/// Runs a full-text query: performs the initial search, drops entries whose
/// documents are no longer present in the index, and fills the result array.
/// </summary>
public QueryResult ProcessQuery( string query, int dummy )
{
    #region Preconditions
    Debug.Assert( IsIndexPresent, "Intermodule communication error - caller CAN NOT call this method without opened text index" );
    #endregion Preconditions

    QueryResult queryResult = PerformInitialSearch( query );
    IntHashTable liveEntries = CompressEntries( queryResult.Result );
    FillResult( queryResult, liveEntries );

    Trace.WriteLineIf( !_suppTrace, "--- Query [" + query + "]: " + liveEntries.Count + " hits found" );
    return queryResult;
}
/// <summary>
/// Ensure that there will be no duplicated IDs - this is possible
/// when doc is removed from index and then inserted with the same ID.
/// The last entry stored for each document index overwrites earlier ones.
/// </summary>
private IntHashTable CompressEntries( IEnumerable <Entry> result )
{
    IntHashTable liveEntries = new IntHashTable();
    if ( result == null )
    {
        return liveEntries;
    }
    foreach( Entry entry in result )
    {
        // Keep only entries whose documents are still in the index.
        if ( IsDocumentPresent( entry.DocIndex ) )
        {
            liveEntries[ entry.DocIndex ] = entry;
        }
    }
    return liveEntries;
}
/// <summary>
/// Refreshes the unread counters on all resources. Assumes to be invoked from
/// the resource thread.
/// </summary>
public void RefreshUnreadCounters()
{
    // Recompute the persisted unread count of every resource which has one.
    IResourceList countedResources = _store.FindResourcesWithProp( null, _propUnreadCount );
    foreach( IResource countedRes in countedResources )
    {
        int linkCount;
        IResourceList countedLinks = GetUnreadCountedLinks( countedRes, out linkCount );
        countedRes.SetProp( _propUnreadCount, CountUnreadResources( countedLinks ) );
    }

    // Reset the in-memory counters of the default state and all tab states.
    _defaultUnreadState.ResetCounters();
    foreach( DictionaryEntry tabEntry in _unreadStateTabMap )
    {
        IntHashTable workspaceStates = (IntHashTable) tabEntry.Value;
        foreach( IntHashTable.Entry wsEntry in workspaceStates )
        {
            ((UnreadState) wsEntry.Value).ResetCounters();
        }
    }
}
// Rebuilds the list view contents from the resource list: registers a
// conversation node for every valid resource, then adds the (optionally
// sorted) top-level conversation nodes to the list view and subscribes to
// the lazy-expansion events.
protected override void AddResourceNodes()
{
    _conversationNodeMap = new IntHashTable();
    // Create (or fetch) a conversation node for every valid resource and
    // mark it as present in the list.
    foreach (IResource res in _resourceList.ValidResources)
    {
        ConversationNode node = GetConversationNode(res);
        node.InList = true;
    }
    ArrayList topLevelNodes = ArrayListPool.Alloc();
    try
    {
        foreach (ConversationNode node in _conversationRoots)
        {
            FillTopLevelNodes(topLevelNodes, node);
        }
        if (_lastComparer != null)
        {
            topLevelNodes.Sort(new ConversationNodeComparer(_lastComparer));
        }
        foreach (ConversationNode node in topLevelNodes)
        {
            JetListViewNode lvNode = AddListViewNode(_listView.Nodes, node);
            // Show the expand marker when the node already has children or the
            // threading handler reports that the thread can expand on demand.
            if (node.Children != null || _threadingHandler.CanExpandThread(node.Resource, ThreadExpandReason.Expand))
            {
                lvNode.HasChildren = true;
            }
        }
    }
    finally
    {
        ArrayListPool.Dispose(topLevelNodes);
    }
    _listView.ChildrenRequested += HandleChildrenRequested;
    _listView.NodeCollection.NodeExpandChanging += HandleExpandChanging;
}
public void TestIntHashTable()
{
    // Fill the table with 1000 entries keyed from -500 to 499.
    IntHashTable table = new IntHashTable();
    Random random = new Random();
    for (int key = -500; key < 500; ++key)
    {
        table[key] = random.Next(10000000).ToString();
    }

    // test IEnumerable implementation
    int entryCount = 0;
    foreach (IntHashTable.Entry entry in table)
    {
        ++entryCount;
        if (entry.Value == null)
        {
            throw new Exception("Null value in IntHashTable");
        }
    }
    if (entryCount != 1000)
    {
        throw new Exception("IntHashTable as IEnumerable returns invalid entries");
    }

    // test IDictionary implementation
    if (!table.Contains(100) || !table.Contains(200))
    {
        throw new Exception("IntHashTable's key resolution error");
    }
    if (table.Count != 1000)
    {
        throw new Exception("IntHashTable.Count returned invalid value: " + table.Count.ToString());
    }
}
/// <summary>
/// Resolves the ID of a query term and pushes the list of its instances in the
/// current document (taken from <c>tokens</c>) onto the operand stack; pushes
/// null when the term is not valuable, unknown, or absent from the document.
/// </summary>
private static void PushTermOnStack(string term, IntHashTable tokens, Stack <List <long> > opStack)
{
    List <long> resultVal = null;
    if (FullTextIndexer.isValuableToken(term))
    {
        int HC;

        // First check Id of the term in the local cache. Since the amount of
        // query terms over all queries in the system is several tens (in average),
        // the size of this cache is small enough. This cache allows not to
        // consult terms trie each time.
        bool cached = _termIDs.TryGetValue(term, out HC);
        if (!cached)
        {
            HC = Word.GetTokenIndex(term);
        }
        if (HC != -1)
        {
            // Cache the freshly resolved ID. -1 (unknown term) is never added to
            // the cache, so a cache hit implies a valid ID; the previous code
            // performed a redundant ContainsKey lookup here.
            if (!cached)
            {
                _termIDs.Add(term, HC);
            }

            Object val = tokens[HC];
            if (val != null)
            {
                resultVal = val as List <long>;
                if (resultVal == null)
                {
                    // A single instance is stored unwrapped; wrap it in a list.
                    resultVal = new List <long>();
                    resultVal.Add((long)val);
                }
            }
        }
    }
    opStack.Push(resultVal);
}
/// <summary>
/// Evaluates the postfix form of a query against the token table of a document.
/// Returns whether the query matched; defaults to true if evaluation fails.
/// </summary>
public static bool MatchQuery( QueryPostfixForm postfixForm, IntHashTable tokens )
{
    Stack <List <long> > operands = new Stack <List <long> >();
    bool matched;
    try
    {
        IteratePostfixExpression( postfixForm, tokens, operands );
        if ( operands.Count != 1 )
        {
            throw new ApplicationException( "QueryParser -- Illegal query statement found" );
        }
        matched = (operands.Peek() != null);
    }
    catch( Exception exc )
    {
        // Best-effort behavior: an evaluation failure is treated as a match.
        Trace.WriteLine( "MatchProcessor -- exception [" + exc.Message + "] occured." );
        matched = true;
    }
    operands.Clear();
    return matched;
}
// Handles a change of the workspaces a resource belongs to: for every
// unread-counted link of the resource, adjusts the unread counters in the
// states of the workspaces which the resource entered or left, both for the
// default tab and for the resource's own tab (if any).
private void ProcessWorkspaceChange(IResource res, IPropertyChangeSet cs)
{
    IntHashTable defaultTabMap = (IntHashTable)_unreadStateTabMap [_tabProvider.GetDefaultTab()];
    IntHashTable specificTabMap = null;
    string resourceTab = _tabProvider.GetResourceTab(res);
    if (resourceTab != null)
    {
        specificTabMap = (IntHashTable)_unreadStateTabMap [resourceTab];
    }
    LinkChange[] wsLinkChanges = cs.GetLinkChanges(_workspaceManager.Props.WorkspaceVisible);
    int[] linkTypes = res.GetLinkTypeIds();
    for (int i = 0; i < linkTypes.Length; i++)
    {
        if (IsUnreadCountedLink(linkTypes [i]))
        {
            IResourceList linkList = res.GetLinksOfType(null, linkTypes [i]);
            foreach (IResource link in linkList)
            {
                // Skip links added in this same change set — presumably their
                // counters are handled by the link-add path; TODO confirm.
                if (cs.GetLinkChange(linkTypes [i], link.Id) == LinkChangeType.Add)
                {
                    continue;
                }
                foreach (LinkChange linkChange in wsLinkChanges)
                {
                    // +1 when the resource entered the workspace, -1 when it left.
                    int delta = (linkChange.ChangeType == LinkChangeType.Add) ? 1 : -1;
                    AdjustCounterInState(defaultTabMap, linkChange.TargetId, link, delta);
                    AdjustCounterInState(specificTabMap, linkChange.TargetId, link, delta);
                }
            }
        }
    }
}
/// <summary>
/// Stores the value of the specified property for the resource and notifies
/// listeners about the change (the event is raised outside of the lock).
/// </summary>
public void SetProp( int resourceID, int propID, object propValue )
{
    object oldValue = null;
    lock( _propHashTable )
    {
        IntHashTable valuesByResource = (IntHashTable) _propHashTable [propID];
        if ( valuesByResource != null )
        {
            oldValue = valuesByResource [resourceID];
        }
        else
        {
            valuesByResource = new IntHashTable();
            _propHashTable [propID] = valuesByResource;
        }
        valuesByResource [resourceID] = propValue;
    }
    if ( ResourceChanged != null )
    {
        ResourceChanged( this, new PropertyProviderChangeEventArgs( resourceID, propID, oldValue ) );
    }
}
/// <summary>
/// Attaches the highlight data provider and resets the context caches.
/// </summary>
internal void SetHighlightDataProvider( IHighlightDataProvider provider )
{
    _highlightDataProvider = provider;
    _contextsRequested = new IntHashSet();
    _contextCache = new IntHashTable();
}
/// <summary>
/// If the array of column descriptors has any "show if not empty" or "show if distinct" columns,
/// removes the columns which are empty or non-distinct from the column array.
/// </summary>
/// <param name="columns">The array of columns to filter.</param>
/// <param name="resList">The resource list by which the filtering is performed.</param>
/// <returns>Filtered array of columns.</returns>
internal ColumnDescriptor[] HideEmptyColumns(ColumnDescriptor[] columns, IResourceList resList)
{
    _mayBeEmptyColumns = new ArrayList();
    _mayBeEmptyPropIds = new ArrayList();
    _distinctColumns = new ArrayList();
    _distinctPropIds = new ArrayList();
    _distinctValueMap = new IntHashTable();
    // Collect the candidate (possibly hidden) columns and their property IDs.
    foreach (ColumnDescriptor colDesc in columns)
    {
        if ((colDesc.Flags & ColumnDescriptorFlags.ShowIfNotEmpty) != 0)
        {
            _mayBeEmptyColumns.Add(colDesc);
            _mayBeEmptyPropIds.Add(((DisplayColumnManager)Core.DisplayColumnManager).PropNamesToIDs(colDesc.PropNames, true));
        }
        else if ((colDesc.Flags & ColumnDescriptorFlags.ShowIfDistinct) != 0)
        {
            _distinctColumns.Add(colDesc);
            _distinctPropIds.Add(((DisplayColumnManager)Core.DisplayColumnManager).PropNamesToIDs(colDesc.PropNames, true));
        }
    }
    // No conditional columns at all - nothing to filter.
    if (_mayBeEmptyColumns.Count == 0 && _distinctColumns.Count == 0)
    {
        return(columns);
    }
    lock ( resList )
    {
        foreach (IResource res in resList.ValidResources)
        {
            // A column which has a value for some resource is not empty:
            // drop it from the candidate list so it stays visible.
            for (int i = _mayBeEmptyColumns.Count - 1; i >= 0; i--)
            {
                if (!IsColumnEmpty(resList, res, i))
                {
                    _mayBeEmptyPropIds.RemoveAt(i);
                    _mayBeEmptyColumns.RemoveAt(i);
                }
            }
            // Same for "show if distinct" columns whose value check fails.
            for (int i = _distinctColumns.Count - 1; i >= 0; i--)
            {
                if (!ValueMatchesDistinctColumn(res, i))
                {
                    _distinctPropIds.RemoveAt(i);
                    _distinctColumns.RemoveAt(i);
                }
            }
            // All candidates eliminated - every column remains visible.
            if (_mayBeEmptyColumns.Count == 0 && _distinctColumns.Count == 0)
            {
                return(columns);
            }
        }
    }
    // Build the result without the columns which remained in the hide lists.
    ColumnDescriptor[] result = new ColumnDescriptor[columns.Length - _mayBeEmptyColumns.Count - _distinctColumns.Count];
    int destIndex = 0;
    for (int i = 0; i < columns.Length; i++)
    {
        if (!_mayBeEmptyColumns.Contains(columns [i]) &&
            !_distinctColumns.Contains(columns [i]))
        {
            result [destIndex++] = columns [i];
        }
    }
    return(result);
}
/// <summary>
/// Writes every accumulated token entry of a document into the term index;
/// wraps any failure into a FormatException naming the offending key.
/// </summary>
public static void FlushDocument( TermIndexAccessor termIndex, int docId, int maxTermInDoc, IntHashTable tokens )
{
    foreach( IntHashTable.Entry entry in tokens )
    {
        try
        {
            termIndex.AddRecord( docId, entry.Key, entry.Value, maxTermInDoc );
        }
        catch( Exception exc )
        {
            string message = "-- IndexConstructor -- Flushing document -- exception occured with key " + entry.Key;
            Trace.WriteLineIf( !FullTextIndexer._suppTrace, message );
            throw new FormatException( message, exc );
        }
    }
}
/// <summary>
/// Fills the checklistbox with properties for the specified resource list:
/// collects the relevant non-internal property types, adds items for the
/// columns already shown (in list order), and finally adds the remaining
/// property types as unchecked columns.
/// </summary>
private void FillPropertyList()
{
    IResourceList allPropList = Core.ResourceStore.GetAllResources("PropType");
    allPropList.Sort(new SortSettings(ResourceProps.DisplayName, true));
    // Collect the non-internal property types which are relevant for the
    // current state, the available columns, or the resource list itself.
    ArrayList propTypeList = new ArrayList();
    IntHashTable propTypeHash = new IntHashTable();
    foreach (IResource res in allPropList)
    {
        int propId = res.GetIntProp("ID");
        if (!Core.ResourceStore.PropTypes [propId].HasFlag(PropTypeFlags.Internal))
        {
            if (StateHasProp(_state, propId) || _availableColumns.IndexOf(propId) >= 0 || _resourceList.HasProp(propId))
            {
                IPropType propType = Core.ResourceStore.PropTypes [propId];
                propTypeList.Add(propType);
                propTypeHash [propId] = propType;
            }
        }
    }
    // DisplayName is handled separately from the regular property types.
    if (StateHasProp(_state, ResourceProps.DisplayName) || IsDisplayNameColumnAvailable())
    {
        IPropType displayNamePropType = Core.ResourceStore.PropTypes [ResourceProps.DisplayName];
        propTypeList.Add(displayNamePropType);
        propTypeHash [ResourceProps.DisplayName] = displayNamePropType;
    }
    Hashtable nameToPropTagMap = new Hashtable();
    // first, add the columns already in the list, in the list order
    foreach (ColumnDescriptor colDesc in _state.Columns)
    {
        int[] propIds = _displayColumnManager.PropNamesToIDs(colDesc.PropNames, true);
        // The resource-type pseudo-column is not shown in the property list.
        if (propIds.Length == 1 && propIds [0] == ResourceProps.Type)
        {
            continue;
        }
        bool[] reverseLinks = new bool [propIds.Length];
        for (int i = 0; i < propIds.Length; i++)
        {
            reverseLinks [i] = AreLinksReverse(_resourceList, propIds [i]);
        }
        // A property type already shown as a column must not be offered again
        // in the unchecked part of the list.
        for (int i = 0; i < propIds.Length; i++)
        {
            IPropType propType = (IPropType)propTypeHash [propIds [i]];
            if (propType == null)
            {
                // Also try the negated ID — presumably used for reverse link
                // properties; TODO confirm against PropNamesToIDs.
                propType = (IPropType)propTypeHash [-propIds [i]];
            }
            if (propType != null)
            {
                propTypeList.Remove(propType);
            }
        }
        PropertyTypeTag tag = AddItemForPropType(colDesc, propIds, reverseLinks, true);
        nameToPropTagMap [tag.ToString()] = tag;
    }
    AddUncheckedColumns(propTypeList, _resourceList, nameToPropTagMap);
}
// Parses the PDB names stream: validates the signature and version, then reads
// the hash table mapping name indices (byte offsets into the string buffer) to
// the strings themselves. Returns the (ni -> name) table.
// Throws Exception on an unsupported signature or version.
private static IntHashTable LoadNameStream(BitAccess bits)
{
    IntHashTable ht = new IntHashTable();

    uint sig;
    int ver;
    bits.ReadUInt32(out sig);   //  0..3  Signature
    bits.ReadInt32(out ver);    //  4..7  Version

    // Read (or skip) string buffer.
    int buf;
    bits.ReadInt32(out buf);    //  8..11 Bytes of Strings

    if (sig != 0xeffeeffe || ver != 1)
    {
        throw new Exception(string.Format("Unsupported Name Stream version. (sig={0:x8}, ver={1})", sig, ver));
    }
    int beg = bits.Position;
    int nxt = bits.Position + buf;
    bits.Position = nxt;

    // Read hash table.
    int siz;
    bits.ReadInt32(out siz);    // n+0..3 Number of hash buckets.
    nxt = bits.Position;
    for (int i = 0; i < siz; i++)
    {
        int ni;
        string name;
        bits.ReadInt32(out ni);
        if (ni != 0)
        {
            // ni is an offset into the string buffer; read the string there
            // and restore the position afterwards.
            int saved = bits.Position;
            bits.Position = beg + ni;
            bits.ReadCString(out name);
            bits.Position = saved;
            ht.Add(ni, name);
        }
    }
    bits.Position = nxt;
    return ht;
}
// Reads the FILECHKSMS (file checksum) subsections between the current
// position and limit, returning a table which maps the offset of each
// checksum record within its subsection (ni) to a PdbSource for the file.
// Subsections of other types are skipped.
private static IntHashTable ReadSourceFileInfo(
    BitAccess bits, uint limit, IntHashTable names, MsfDirectory dir,
    Dictionary<string, int> nameIndex, PdbReader reader)
{
    IntHashTable checks = new IntHashTable();

    int begin = bits.Position;
    while (bits.Position < limit)
    {
        int sig;
        int siz;
        bits.ReadInt32(out sig);
        bits.ReadInt32(out siz);
        int place = bits.Position;
        int endSym = bits.Position + siz;

        switch ((DEBUG_S_SUBSECTION)sig)
        {
            case DEBUG_S_SUBSECTION.FILECHKSMS:
                while (bits.Position < endSym)
                {
                    CV_FileCheckSum chk;

                    // The record's offset within the subsection is the key.
                    int ni = bits.Position - place;
                    bits.ReadUInt32(out chk.name);
                    bits.ReadUInt8(out chk.len);
                    bits.ReadUInt8(out chk.type);

                    // chk.name indexes the name stream hash to get the file name.
                    PdbSource src = new PdbSource(/*(uint)ni,*/ (string)names[(int)chk.name],
                                                  SymDocumentType.Text, Guid.Empty, Guid.Empty);
                    checks.Add(ni, src);
                    bits.Position += chk.len;
                    bits.Align(4);
                }
                bits.Position = endSym;
                break;

            default:
                // Skip subsections of any other type.
                bits.Position = endSym;
                break;
        }
    }
    return checks;
}
// Scans the module's symbol stream for OEM "TSLI" records which map metadata
// tokens to source line spans, building chains of PdbTokenLine records in
// tokenToSourceMapping, then resolves each record's source file from the
// module's file checksum subsections.
// Throws Exception on a bad signature or an unknown OEM symbol guid.
private static void LoadTokenToSourceInfo(
    BitAccess bits, DbiModuleInfo module, IntHashTable names, MsfDirectory dir,
    Dictionary<string, int> nameIndex, PdbReader reader,
    Dictionary<uint, PdbTokenLine> tokenToSourceMapping)
{
    bits.Position = 0;
    int sig;
    bits.ReadInt32(out sig);
    if (sig != 4)
    {
        throw new Exception(string.Format("Invalid signature. (sig={0})", sig));
    }

    bits.Position = 4;

    // Walk the symbol records; each starts with its size and record type.
    while (bits.Position < module.cbSyms)
    {
        ushort siz;
        ushort rec;
        bits.ReadUInt16(out siz);
        int star = bits.Position;
        int stop = bits.Position + siz;
        bits.Position = star;
        bits.ReadUInt16(out rec);

        switch ((SYM)rec)
        {
            case SYM.S_OEM:
                OemSymbol oem;
                bits.ReadGuid(out oem.idOem);
                bits.ReadUInt32(out oem.typind);
                // internal byte[] rgl; // user data, force 4-byte alignment

                if (oem.idOem == MsilMetaData)
                {
                    string name = bits.ReadString();
                    if (name == "TSLI")
                    {
                        // Token-to-Source-Line-Info record: token plus span.
                        uint token;
                        uint file_id;
                        uint line;
                        uint column;
                        uint endLine;
                        uint endColumn;
                        bits.ReadUInt32(out token);
                        bits.ReadUInt32(out file_id);
                        bits.ReadUInt32(out line);
                        bits.ReadUInt32(out column);
                        bits.ReadUInt32(out endLine);
                        bits.ReadUInt32(out endColumn);
                        PdbTokenLine tokenLine;
                        if (!tokenToSourceMapping.TryGetValue(token, out tokenLine))
                            tokenToSourceMapping.Add(token, new PdbTokenLine(token, file_id, line, column, endLine, endColumn));
                        else
                        {
                            // A token may have several records; append to the
                            // end of its linked list.
                            while (tokenLine.nextLine != null)
                                tokenLine = tokenLine.nextLine;
                            tokenLine.nextLine = new PdbTokenLine(token, file_id, line, column, endLine, endColumn);
                        }
                    }
                    bits.Position = stop;
                    break;
                }
                else
                {
                    throw new Exception(string.Format("OEM section: guid={0} ti={1}", oem.idOem, oem.typind));
                }
            case SYM.S_END:
                bits.Position = stop;
                break;
            default:
                bits.Position = stop;
                break;
        }
    }

    // The file checksum subsections follow the symbols and old-style lines.
    bits.Position = module.cbSyms + module.cbOldLines;
    int limit = module.cbSyms + module.cbOldLines + module.cbLines;
    IntHashTable sourceFiles = ReadSourceFileInfo(bits, (uint)limit, names, dir, nameIndex, reader);
    foreach (var tokenLine in tokenToSourceMapping.Values)
    {
        tokenLine.sourceFile =
            (PdbSource)sourceFiles[(int)tokenLine.file_id];
    }
}
public void WhenNumberNotInTable_ContainsShouldReturnFalse()
{
    // Arrange: an empty table contains nothing.
    var table = new IntHashTable();

    // Act + Assert
    Assert.False(table.Contains(0));
}
// Loads the managed line-number information for the given functions from the
// DEBUG_S subsections between the current position and limit. First pass
// collects the FILECHKSMS records into the shared 'checks' table
// (offset -> PdbSource); second pass reads the LINES subsections and attaches
// PdbLines blocks to the functions located by section/offset.
static void LoadManagedLines(PdbFunction[] funcs, IntHashTable names, BitAccess bits, uint limit)
{
    // FindFunction below relies on address order.
    Array.Sort(funcs, PdbFunction.byAddress);

    // NOTE(review): 'checks' is a table shared across calls; it is reset here.
    checks.Clear();

    // Read the files first
    int begin = bits.Position;
    while (bits.Position < limit)
    {
        int sig;
        int siz;
        bits.ReadInt32(out sig);
        bits.ReadInt32(out siz);
        int place = bits.Position;
        int endSym = bits.Position + siz;

        switch ((DEBUG_S_SUBSECTION)sig)
        {
            case DEBUG_S_SUBSECTION.FILECHKSMS:
                while (bits.Position < endSym)
                {
                    CV_FileCheckSum chk;

                    // The record's offset within the subsection is the key
                    // that LINES subsections use to refer to the file.
                    int ni = bits.Position - place;
                    bits.ReadUInt32(out chk.name);
                    bits.ReadUInt8(out chk.len);
                    bits.ReadUInt8(out chk.type);

                    string name = (string)names[(int)chk.name];
                    PdbSource src = new PdbSource((uint)ni, name);
                    checks.Add(ni, src);
                    bits.Position += chk.len;
                    bits.Align(4);
                }
                bits.Position = endSym;
                break;

            default:
                bits.Position = endSym;
                break;
        }
    }

    // Read the lines next.
    bits.Position = begin;
    while (bits.Position < limit)
    {
        int sig;
        int siz;
        bits.ReadInt32(out sig);
        bits.ReadInt32(out siz);
        int endSym = bits.Position + siz;

        switch ((DEBUG_S_SUBSECTION)sig)
        {
            case DEBUG_S_SUBSECTION.LINES:
            {
                CV_LineSection sec;
                bits.ReadUInt32(out sec.off);
                bits.ReadUInt16(out sec.sec);
                bits.ReadUInt16(out sec.flags);
                bits.ReadUInt32(out sec.cod);
                PdbFunction func = FindFunction(funcs, sec.sec, sec.off);

                // Count the line blocks.
                int begSym = bits.Position;
                int blocks = 0;
                while (bits.Position < endSym)
                {
                    CV_SourceFile file;
                    bits.ReadUInt32(out file.index);
                    bits.ReadUInt32(out file.count);
                    bits.ReadUInt32(out file.linsiz);   // Size of payload.

                    // 8 bytes per line record, plus 4 when column info is
                    // present (sec.flags bit 0).
                    int linsiz = (int)file.count * (8 + ((sec.flags & 1) != 0 ? 4 : 0));
                    bits.Position += linsiz;
                    blocks++;
                }

                func.lines = new PdbLines[blocks];
                int block = 0;

                // Second pass over the same records: actually read the lines.
                bits.Position = begSym;
                while (bits.Position < endSym)
                {
                    CV_SourceFile file;
                    bits.ReadUInt32(out file.index);
                    bits.ReadUInt32(out file.count);
                    bits.ReadUInt32(out file.linsiz);   // Size of payload.

                    PdbSource src = (PdbSource)checks[(int)file.index];
                    PdbLines tmp = new PdbLines(src, file.count);
                    func.lines[block++] = tmp;
                    PdbLine[] lines = tmp.lines;

                    // Line records come first, column records (if any) follow.
                    int plin = bits.Position;
                    int pcol = bits.Position + 8 * (int)file.count;

                    for (int i = 0; i < file.count; i++)
                    {
                        CV_Line line;
                        CV_Column column = new CV_Column();

                        bits.Position = plin + 8 * i;
                        bits.ReadUInt32(out line.offset);
                        bits.ReadUInt32(out line.flags);

                        // Decoded but not currently used.
                        uint delta = (line.flags & 0x7f000000) >> 24;
                        bool statement = ((line.flags & 0x80000000) == 0);

                        if ((sec.flags & 1) != 0)
                        {
                            bits.Position = pcol + 4 * i;
                            bits.ReadUInt16(out column.offColumnStart);
                            bits.ReadUInt16(out column.offColumnEnd);
                        }

                        lines[i] = new PdbLine(line.offset, line.flags & 0xffffff, column.offColumnStart, column.offColumnEnd);
                    }
                }
                break;
            }
        }
        bits.Position = endSym;
    }
}
/// <summary>
/// Interactive console loop over an int and a string hash table: accepts
/// "add", "find" and "print" commands in either "int" or "string" mode.
/// Returns 0 on exit.
/// </summary>
public static int Main()
{
    Console.WriteLine("Lab №4, Ozierski Vital, group 052004");
    IntHashTable ih = new IntHashTable(50);
    StringHashTable sh = new StringHashTable(16);
    string cmd;
    bool found;
    int value;
    while (true)
    {
        Console.WriteLine();
        Console.WriteLine("Commands: add, find, print");
        Console.WriteLine("Format: <command> <int|string> or exit");
        Console.Write("? ");
        cmd = Console.ReadLine();
        if (cmd == "exit")
            break;
        // Split once instead of re-splitting for every access.
        string[] parts = cmd.Split(' ');
        if (parts.Length != 2)
            Console.WriteLine("Incorrect command format");
        else
        {
            string command = parts[0];
            string mode = parts[1];
            if (mode != "string" && mode != "int")
                Console.WriteLine("Incorrect mode");
            else
            {
                if (command == "add")
                {
                    Console.Write("Value: ");
                    cmd = Console.ReadLine();
                    if (mode == "string")
                        sh.Add(cmd);
                    // TryParse instead of Parse: bad input no longer crashes the program.
                    else if (Int32.TryParse(cmd, out value))
                        ih.Add(value);
                    else
                        Console.WriteLine("Incorrect integer value");
                }
                else if (command == "find")
                {
                    Console.Write("Value: ");
                    cmd = Console.ReadLine();
                    found = false;
                    if (mode == "string")
                        found = sh.Find(cmd);
                    else if (Int32.TryParse(cmd, out value))
                        found = ih.Find(value);
                    else
                    {
                        Console.WriteLine("Incorrect integer value");
                        Console.WriteLine();
                        continue;
                    }
                    // Fixed message: was "No found".
                    Console.WriteLine(found ? "Found" : "Not found");
                }
                else if (command == "print")
                {
                    if (mode == "string")
                        sh.Print();
                    else
                        ih.Print();
                }
                else
                    Console.WriteLine("Incorrect Command");
            }
            Console.WriteLine();
        }
    }
    return 0;
}
/// <summary>
/// Walks the postfix form of a query and applies each node to the operand
/// stack: terms push their instance lists, sections apply a unary operation,
/// everything else is treated as a binary operator.
/// </summary>
private static void IteratePostfixExpression(IList <QueryParserNode> postfixForm, IntHashTable tokens, Stack <List <long> > opStack)
{
    foreach (QueryParserNode node in postfixForm)
    {
        if (node.NodeType == QueryParserNode.Type.eoTerm)
        {
            PushTermOnStack(((TermNode)node).Term, tokens, opStack);
        }
        else if (node.NodeType == QueryParserNode.Type.eoSection)
        {
            UnarySectionOp(((SectionNode)node).SectionName, opStack);
        }
        else
        {
            BinaryOp(node, opStack);
        }
    }
}