/// <summary>
/// Creates a page wrapper bound to the given journaled resource.
/// </summary>
/// <param name="data">Resource this page caches data for.</param>
/// <param name="number">Page index within the resource.</param>
/// <param name="size">Page size in bytes.</param>
public Page(IJournaledResource data, long number, int size)
{
    Data = data;
    Number = number;
    Size = size;
    // A freshly constructed page has no outstanding references.
    refCount = 0;
    // Reset runs last so it can observe the fields assigned above.
    Reset();
}
// Flushes every cached page belonging to the given resource, evicts the
// pages that are no longer referenced from the page map, and finally
// closes the underlying resource.
internal void Close(IJournaledResource data)
{
    long id = data.Id;
    // Flush all changes made to the resource then close.
    lock (pageMap)
    {
        // Flush all the pages out to the log.
        // This scans the entire hash for values and could be an expensive
        // operation. Fortunately 'close' isn't used all that often.
        for (int i = 0; i < pageMap.Length; ++i)
        {
            var page = pageMap[i];
            Page prev = null;
            while (page != null)
            {
                bool deletedHash = false;
                if (page.Id == id)
                {
                    lock (page)
                    {
                        // Flush the page (will only actually flush if there
                        // are changes).
                        page.Flush();
                        // Remove this page if it is no longer in use:
                        // unlink it from the singly-linked hash chain.
                        if (!page.IsUsed)
                        {
                            deletedHash = true;
                            if (prev == null)
                            {
                                // Deleted node was the bucket head.
                                pageMap[i] = page.Next;
                            }
                            else
                            {
                                prev.Next = page.Next;
                            }
                        }
                    }
                }
                // Only advance 'prev' when the current node stayed in the
                // chain — after an unlink, 'prev' must keep pointing at the
                // node before the removed one.
                if (!deletedHash)
                {
                    prev = page;
                }
                // Go to next page in hash chain.
                page = page.Next;
            }
        }
    }
    data.Close();
}
/// <summary>
/// Reads <paramref name="count"/> bytes from the resource starting at the
/// given absolute position, spanning page boundaries as needed.
/// </summary>
/// <returns>The number of bytes requested (always <paramref name="count"/>).</returns>
internal int ReadFrom(IJournaledResource data, long position, byte[] buffer, int offset, int count)
{
    // The full requested length is reported back to the caller.
    int requested = count;
    long pageNumber = position / PageSize;
    int pageOffset = (int)(position % PageSize);

    // The first chunk may start mid-page; every later chunk starts at
    // offset 0 of the next page. The do/while guarantees at least one
    // page fetch even for a zero-length read, matching prior behavior.
    do
    {
        int chunk = System.Math.Min(count, PageSize - pageOffset);
        var page = FetchPage(data, pageNumber);
        lock (page)
        {
            try
            {
                page.Open();
                page.Read(pageOffset, buffer, offset, chunk);
            }
            finally
            {
                // Always release the fetch reference, even if Open/Read throws.
                page.Dispose();
            }
        }
        count -= chunk;
        offset += chunk;
        pageOffset = 0;
        ++pageNumber;
    } while (count > 0);

    return requested;
}
/// <summary>
/// Writes <paramref name="len"/> bytes to the resource starting at the
/// given absolute position, spanning page boundaries as needed.
/// </summary>
internal void WriteTo(IJournaledResource data, long position, byte[] buf, int off, int len)
{
    long pageNumber = position / PageSize;
    int pageOffset = (int)(position % PageSize);

    // The first chunk may start mid-page; every later chunk starts at
    // offset 0 of the next page. The do/while guarantees at least one
    // page fetch even for a zero-length write, matching prior behavior.
    do
    {
        int chunk = System.Math.Min(len, PageSize - pageOffset);
        var page = FetchPage(data, pageNumber);
        lock (page)
        {
            try
            {
                page.Open();
                page.Write(pageOffset, buf, off, chunk);
            }
            finally
            {
                // Always release the fetch reference, even if Open/Write throws.
                page.Dispose();
            }
        }
        len -= chunk;
        off += chunk;
        pageOffset = 0;
        ++pageNumber;
    } while (len > 0);
}
/// <summary>
/// Creates a journaled file store whose backing resource is obtained
/// from the supplied buffer manager by name.
/// </summary>
/// <param name="resourceName">Name of the resource to create/open.</param>
/// <param name="bufferManager">Buffer manager that owns the resource.</param>
/// <param name="readOnly">Whether the store disallows writes.</param>
internal JournaledFileStore(string resourceName, LoggingBufferManager bufferManager, bool readOnly)
    : base(readOnly)
{
    this.bufferManager = bufferManager;
    resource = bufferManager.CreateResource(resourceName);
}
/// <summary>
/// Writes to the journaled resource. Not yet implemented.
/// </summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public void Write(IJournaledResource data, long position, byte[] buffer, int offset, int length)
{
    throw new NotImplementedException();
}
// NOTE(review): this constructor is byte-identical to another
// JournaledFileStore constructor appearing earlier in this chunk. If both
// live in the same class this is a duplicate member and will not compile —
// verify against the full file whether one copy should be removed.
internal JournaledFileStore(string resourceName, LoggingBufferManager bufferManager, bool readOnly)
    : base(readOnly)
{
    this.bufferManager = bufferManager;
    resource = bufferManager.CreateResource(resourceName);
}
// Returns the cached page for (resource id, page number), creating and
// caching it if absent. The found/created page is moved to the head of its
// hash chain and receives a reference for this fetch (callers release it
// via Dispose). Brand-new pages get an extra cache-resident reference.
private Page FetchPage(IJournaledResource data, long pageNumber)
{
    long id = data.Id;
    Page prevPage = null;
    bool newPage = false;
    Page page;
    lock (pageMap)
    {
        // Generate the hash code for this page.
        // The mask keeps the hash non-negative before the modulo.
        int p = (CalcHashCode(id, pageNumber) & 0x07FFFFFFF) % pageMap.Length;
        // Search for this page in the hash bucket's chain.
        page = pageMap[p];
        while (page != null && !page.Matches(id, pageNumber))
        {
            prevPage = page;
            page = page.Next;
        }
        // Page isn't found so create it and add to the cache.
        if (page == null)
        {
            page = new Page(data, pageNumber, PageSize);
            // Add this page to the map (insert at bucket head).
            page.Next = pageMap[p];
            pageMap[p] = page;
        }
        else
        {
            // Move this page to the head if it's not already at the head
            // (move-to-front keeps recently fetched pages cheap to find).
            if (prevPage != null)
            {
                prevPage.Next = page.Next;
                page.Next = pageMap[p];
                pageMap[p] = page;
            }
        }
        lock (page)
        {
            // If page not in use then it must be newly setup, so add a
            // reference (this extra reference represents cache residency).
            if (!page.IsUsed)
            {
                page.Reset();
                newPage = true;
                page.Reference();
            }
            // Add a reference for this fetch.
            page.Reference();
        }
    }
    // Notify listeners outside the pageMap lock.
    // If the page is new,
    if (newPage)
    {
        OnPageCreated(page);
    }
    else
    {
        OnPageAccessed(page);
    }
    // Return the page.
    return (page);
}