/// <summary>
/// Reads the list of index items whose head is stored at the page index provided.
/// The data read from persistent storage is merged with the data already in memory
/// using the following protocol:
/// if the in-memory data has the "IsDirty" flag set then we keep the in-memory data,
/// else we overwrite the in-memory data with the data from disk.
/// </summary>
/// <param name="stream">data file to read from</param>
/// <param name="pageIdx">index of the first physical page storing the list</param>
/// <returns>returns the index of the first physical page we read data from</returns>
public int ReadIndexData(FileStreamWrapper stream, int pageIdx)
{
    List<IndexItem<T>> itemList = null;
    List<int> pageIdxList = null;

    // create reader
    ListReader<IndexItem<T>> reader = new ListReader<IndexItem<T>>();
    reader.ReadList(stream, pageIdx, out itemList, out pageIdxList);

    // merge with current data
    for (int idx = 0; idx < itemList.Count; idx++)
    {
        var item = itemList[idx];

        IndexItem<T> indexEntry = null;
        if (this.indexMap.TryGetValue(item.ResourceId, out indexEntry))
        {
            // keep in-memory entries that are dirty, or that were marked deleted (null)
            if (null == indexEntry || indexEntry.IsDirty)
            {
                continue;
            }
        }

        this.indexMap[item.ResourceId] = item;
    }

    // update page index
    this.indexStoragePages = pageIdxList;

    // return index of the first page
    return this.indexStoragePages[0];
}
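// A minimal round-trip sketch for the merge protocol above. The container type name
// "StorageIndex<T>" is an assumption made for illustration only; FileStreamWrapper.CreateObject,
// StoragePageManager, WriteIndexData and ReadIndexData are the calls shown in these examples.
public void IndexRoundTripSketch()
{
    StoragePageManager spaceMgr = new StoragePageManager();
    StorageIndex<string> index = new StorageIndex<string>();    // hypothetical container type

    using (FileStreamWrapper stream = FileStreamWrapper.CreateObject("Index_Sketch.tpdb"))
    {
        // persist the current index and remember where the on-disk list starts
        List<int> freedPages = null;
        int root = index.WriteIndexData(stream, spaceMgr, out freedPages);

        // reading the same root back merges disk data into memory;
        // entries flagged IsDirty in memory are left untouched
        index.ReadIndexData(stream, root);
    }
}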
public void SPT_TestReadWriteTable()
{
    int entryCount = 40;
    int physicalPageDistance = 5;

    string dataFile = "SPT_TestData1.tpdb";
    if (File.Exists(dataFile))
    {
        File.Delete(dataFile);
    }

    List<int> freedPages = null;
    StoragePageManager spaceMgr = new StoragePageManager();
    StoragePageTable pageTable2 = new StoragePageTable();
    StoragePageTable pageTable = new StoragePageTable();

    for (int idx = 0; idx < entryCount; idx++)
    {
        pageTable.SetLogicalPage(idx + physicalPageDistance);
    }

    using (FileStreamWrapper dataFileStream = FileStreamWrapper.CreateObject(dataFile))
    {
        int root = pageTable.WritePageTableData(dataFileStream, spaceMgr, out freedPages);

        dataFileStream.Seek(0, SeekOrigin.Begin);
        pageTable2.ReadPageTableData(dataFileStream, root);
    }

    for (int idx = 0; idx < entryCount; idx++)
    {
        int physicalAddress = pageTable2.GetPhysicalPage(idx);
        Assert.AreEqual(idx + physicalPageDistance, physicalAddress);
    }
}
[InlineData(5, 20, 10)]    // Internal buffer too small, force a move-to operation
public void ReadData(int internalBufferLength, int outBufferLength, int requestedBytes)
{
    // Setup:
    // ... I have a file that has a handful of bytes in it
    string fileName = Path.GetTempFileName();
    const string stringToWrite = "hello";
    CreateTestFile(fileName, stringToWrite);
    byte[] targetBytes = Encoding.Unicode.GetBytes(stringToWrite);

    try
    {
        // If:
        // ... I have a file stream wrapper that has been initialized to an existing file
        // ... And I read some bytes from it
        int bytesRead;
        byte[] buf = new byte[outBufferLength];
        using (FileStreamWrapper fsw = new FileStreamWrapper())
        {
            fsw.Init(fileName, internalBufferLength, FileAccess.Read);
            bytesRead = fsw.ReadData(buf, targetBytes.Length);
        }

        // Then:
        // ... I should get those bytes back
        Assert.Equal(targetBytes.Length, bytesRead);
        Assert.True(targetBytes.Take(targetBytes.Length).SequenceEqual(buf.Take(targetBytes.Length)));
    }
    finally
    {
        // Cleanup:
        // ... Delete the test file
        CleanupTestFile(fileName);
    }
}
/// <summary>
/// Reads the list of page table items whose head is stored at the page index provided.
/// The data read from persistent storage is merged with the data already in memory
/// using the following protocol:
/// if the in-memory data has the "IsDirty" flag set then we keep the in-memory data,
/// else we overwrite the in-memory data with the data from disk.
/// </summary>
/// <param name="stream">data file to read from</param>
/// <param name="pageIdx">index of the first physical page storing the list</param>
/// <returns>returns the index of the first physical page we read data from</returns>
public int ReadPageTableData(FileStreamWrapper stream, int pageIdx)
{
    List<PageTableItem> itemList = null;
    List<int> pageIdxList = null;

    // create reader
    ListReader<PageTableItem> reader = new ListReader<PageTableItem>();
    reader.ReadList(stream, pageIdx, out itemList, out pageIdxList);

    // merge with current data
    for (int idx = 0; idx < this.pageTable.Count && idx < itemList.Count; idx++)
    {
        if (!this.pageTable[idx].IsDirty)
        {
            this.pageTable[idx] = itemList[idx];
        }
    }

    // add the missing ones
    if (this.pageTable.Count < itemList.Count)
    {
        this.pageTable.AddRange(
            itemList.GetRange(
                this.pageTable.Count,
                itemList.Count - this.pageTable.Count));
    }

    // update page index
    this.pageTableStoragePages = pageIdxList;

    // return index of the first page
    return this.pageTableStoragePages[0];
}
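// Stand-alone illustration of the merge rule used above, written against plain lists so it can
// run outside the storage engine: dirty in-memory entries win, clean ones are replaced by the
// disk copy, and any extra disk entries are appended. The "Entry" type is hypothetical.
sealed class Entry
{
    public bool IsDirty;
    public int PhysicalPage;
}

static void MergeSketch(List<Entry> inMemory, List<Entry> fromDisk)
{
    for (int idx = 0; idx < inMemory.Count && idx < fromDisk.Count; idx++)
    {
        if (!inMemory[idx].IsDirty)
        {
            inMemory[idx] = fromDisk[idx];    // clean entry: the disk copy wins
        }
    }

    if (inMemory.Count < fromDisk.Count)
    {
        // disk knows about more pages than memory does: take them as-is
        inMemory.AddRange(fromDisk.GetRange(inMemory.Count, fromDisk.Count - inMemory.Count));
    }
}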
public void InitSuccessful()
{
    string fileName = Path.GetTempFileName();
    try
    {
        using (FileStreamWrapper fsw = new FileStreamWrapper())
        {
            // If:
            // ... I have a file stream wrapper that is initialized with valid parameters
            fsw.Init(fileName, 8192, FileAccess.ReadWrite);

            // Then:
            // ... The file should exist
            FileInfo fileInfo = new FileInfo(fileName);
            Assert.True(fileInfo.Exists);
        }
    }
    finally
    {
        // Cleanup:
        // ... Delete the file that was created
        try { File.Delete(fileName); }
        catch { /* Don't care */ }
    }
}
/// <summary>
/// Remarks:
/// No locking is needed because this method cannot be invoked concurrently on the object.
/// It is only called during initialization, before the object is returned
/// from the object factory method.
/// </summary>
/// <param name="filePath"></param>
private void Init(string filePath)
{
    // open the file
    this.dataFile = FileStreamWrapper.CreateObject(filePath);

    DBHdr dbRoot = this.ReadDBRoot();
    if (null == dbRoot)
    {
        // setup the data file
        this.InitializeDataFile();

        // flush the file
        this.dataFile.Flush(true);
    }
    else
    {
        // read the prepared transaction data
        this.preparedContextMap.ReadTransactionTableData(
            this.dataFile, dbRoot.PrepedTransactions);

        // read the data manager
        this.pageManager.ReadPageManagerData(
            this.dataFile, dbRoot.PageManager);
    }
}
public static IEnumerable<(string filename, string content)> ReadFromFileStreamBackwards(
    string fileName, Func<string, FileStream> OpenFile, long maxLines = long.MaxValue)
{
    FileSeeker f = new FileSeeker();

    using var fileStream = new FileStreamWrapper(OpenFile(fileName));
    fileStream.Seek(0, SeekOrigin.End);    // We want to read backwards - so start at the end

    for (;;)
    {
        string line = null;
        try
        {
            line = f.SeekLastLineFromCurrentAndPositionOnStartOfItAndReturnReadLine(fileStream);
        }
        catch (Exception)
        {
            // line stays null (e.g. the start of the file was reached), so the loop ends below
        }

        if (line != null && --maxLines > 0)
        {
            yield return (Path.GetFileNameWithoutExtension(fileName), line);
        }
        else
        {
            break;
        }
    }
}
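// Possible call site for ReadFromFileStreamBackwards: stream the last 100 lines of a log file,
// newest first. The log path and the FileStream open options are placeholders; the method is
// called unqualified because its containing class is not shown in the snippet above.
foreach ((string filename, string content) in ReadFromFileStreamBackwards(
             @"logs\app.log",
             path => new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite),
             maxLines: 100))
{
    Console.WriteLine($"{filename}: {content}");
}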
public static bool Exists(string path)
{
    if (String.IsNullOrEmpty(path))
    {
        return false;
    }

    StreamWrapper wrapper;
    if (StatInternalCheck(ref path, true, out wrapper))
    {
        string url;
        if (StatInternalTryCache(path, out url))
        {
            return true;
        }

        // we can't just call {Directory|File}.Exists since we have to throw warnings
        // also we are not calling full stat(), it is slow
        return FileStreamWrapper.HandleNewFileSystemInfo(false, path,
            (p) => new FileInfo(p).Exists || new DirectoryInfo(p).Exists);
    }

    return false;
}
public int ReadPageData(FileStreamWrapper stream, int pageIdx)
{
    if (null == stream)
    {
        throw new ArgumentNullException();
    }

    byte[] dataBuffer = new byte[PageSize];

    // read from the file stream
    if (0 > pageIdx || stream.Length < (pageIdx + 1) * PageSize)
    {
        throw new InvalidPageException();
    }

    stream.Seek(pageIdx * PageSize, SeekOrigin.Begin);
    stream.Read(dataBuffer, 0, dataBuffer.Length);

    // initialize members
    this.recordList = new List<byte[]>();
    this.ReadPageData(dataBuffer);

    return pageIdx;
}
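// Sketch of the page addressing used by ReadPageData/WritePageData: pages are fixed-size blocks
// laid out back to back, so page i starts at byte offset i * PageSize and a file of length L
// holds L / PageSize complete pages. Treating PageSize as publicly reachable on StoragePage is
// an assumption; substitute the engine's page-size constant if it is not.
using (FileStreamWrapper stream = FileStreamWrapper.CreateObject("Pages_Sketch.tpdb"))
{
    int pageCount = (int)(stream.Length / StoragePage.PageSize);
    if (pageCount > 0)
    {
        StoragePage lastPage = new StoragePage();
        lastPage.ReadPageData(stream, pageCount - 1);    // read the last complete page in the file
    }
}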
public async Task<HttpResponseMessage> StopProfileAsync(int id)
{
    using (_tracer.Step("ProcessController.StopProfileAsync"))
    {
        // check if the process Id exists in the sandbox. If it doesn't, this method returns a 404 and we are done.
        var process = GetProcessById(id);

        bool iisProfiling = ProfileManager.IsIisProfileRunning(process.Id);

        var result = await ProfileManager.StopProfileAsync(process.Id, _tracer, iisProfiling);

        if (result.StatusCode != HttpStatusCode.OK)
        {
            return Request.CreateErrorResponse(result.StatusCode, result.Message);
        }
        else
        {
            string profileFileFullPath = ProfileManager.GetProfilePath(process.Id, iisProfiling);
            string profileFileName = Path.GetFileName(profileFileFullPath);

            HttpResponseMessage response = Request.CreateResponse();
            response.Content = new StreamContent(FileStreamWrapper.OpenRead(profileFileFullPath));
            response.Content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
            response.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment");
            response.Content.Headers.ContentDisposition.FileName = profileFileName;
            return response;
        }
    }
}
private void FileStream_Save(object sender, EventArgs e)
{
    SaveFileDialog sfd = new SaveFileDialog();
    sfd.FileName = fileName;
    sfd.ShowDialog();

    FileStreamWrapper.SaveFile(textBox1.Text);
}
private void FileStrea_Load(object sender, EventArgs e)
{
    OpenFileDialog ofd = new OpenFileDialog();
    ofd.ShowDialog();

    fileName = ofd.FileName;
    FileStreamWrapper.fileName = fileName;
    textBox1.Text = FileStreamWrapper.OpenFile();
}
public HttpResponseMessage GCDump(int id, int maxDumpCountK = 0, string format = null)
{
    using (_tracer.Step("ProcessController.GCDump"))
    {
        DumpFormat dumpFormat = ParseDumpFormat(format, DumpFormat.DiagSession);
        var process = GetProcessById(id);
        var ext = dumpFormat == DumpFormat.DiagSession ? "diagsession" : "gcdump";

        string dumpFile = Path.Combine(_environment.LogFilesPath, "minidump", "dump." + ext);
        FileSystemHelpers.EnsureDirectory(_fileSystem, Path.GetDirectoryName(dumpFile));
        FileSystemHelpers.DeleteFileSafe(_fileSystem, dumpFile);

        string resourcePath = GetResponseFileName(process.ProcessName, "gcdump");

        try
        {
            using (_tracer.Step(String.Format("GCDump pid={0}, name={1}, file={2}", process.Id, process.ProcessName, dumpFile)))
            {
                process.GCDump(dumpFile, resourcePath, maxDumpCountK, _tracer, _settings.GetCommandIdleTimeout());
                _tracer.Trace("GCDump size={0}", new FileInfo(dumpFile).Length);
            }
        }
        catch (Exception ex)
        {
            _tracer.TraceError(ex);
            FileSystemHelpers.DeleteFileSafe(_fileSystem, dumpFile);
            return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, ex.Message);
        }

        if (dumpFormat == DumpFormat.Zip)
        {
            string responseFileName = GetResponseFileName(process.ProcessName, "zip");

            HttpResponseMessage response = Request.CreateResponse();
            response.Content = ZipStreamContent.Create(responseFileName, _tracer, zip =>
            {
                try
                {
                    zip.AddFile(dumpFile, String.Empty);
                }
                finally
                {
                    FileSystemHelpers.DeleteFileSafe(_fileSystem, dumpFile);
                }
            });
            return response;
        }
        else
        {
            string responseFileName = GetResponseFileName(process.ProcessName, ext);

            HttpResponseMessage response = Request.CreateResponse();
            response.Content = new StreamContent(FileStreamWrapper.OpenRead(dumpFile, _fileSystem));
            response.Content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
            response.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment");
            response.Content.Headers.ContentDisposition.FileName = responseFileName;
            return response;
        }
    }
}
public void InitInvalidBufferLength(int bufferLength)
{
    // If:
    // ... I have a file stream wrapper that is initialized with an invalid buffer length
    // Then:
    // ... I should throw an argument out of range exception
    using (FileStreamWrapper fsw = new FileStreamWrapper())
    {
        Assert.Throws<ArgumentOutOfRangeException>(() => fsw.Init("validFileName", bufferLength, FileAccess.Read));
    }
}
public void InitInvalidFilenameParameter(string fileName)
{
    // If:
    // ... I have a file stream wrapper that is initialized with an invalid fileName
    // Then:
    // ... It should throw an argument exception
    using (FileStreamWrapper fsw = new FileStreamWrapper())
    {
        Assert.Throws<ArgumentException>(() => fsw.Init(fileName, 8192, FileAccess.Read));
    }
}
public void InitInvalidFileAccessMode()
{
    // If:
    // ... I attempt to open a file stream wrapper that is initialized with an invalid file
    //     access mode
    // Then:
    // ... I should get an argument exception
    using (FileStreamWrapper fsw = new FileStreamWrapper())
    {
        Assert.Throws<ArgumentException>(() => fsw.Init("validFileName", 8192, FileAccess.Write));
    }
}
public void PerformWriteOpOnReadOnlyWrapper()
{
    byte[] buf = new byte[10];
    using (FileStreamWrapper fsw = new FileStreamWrapper())
    {
        // If:
        // ... I have a readonly file stream wrapper
        // Then:
        // ... Attempting to perform any write operation should result in an exception
        Assert.Throws<InvalidOperationException>(() => fsw.WriteData(buf, 1));
        Assert.Throws<InvalidOperationException>(() => fsw.Flush());
    }
}
public void PerformOpWithoutInit()
{
    byte[] buf = new byte[10];
    using (FileStreamWrapper fsw = new FileStreamWrapper())
    {
        // If:
        // ... I have a file stream wrapper that hasn't been initialized
        // Then:
        // ... Attempting to perform any operation will result in an exception
        Assert.Throws<InvalidOperationException>(() => fsw.ReadData(buf, 1));
        Assert.Throws<InvalidOperationException>(() => fsw.ReadData(buf, 1, 0));
        Assert.Throws<InvalidOperationException>(() => fsw.WriteData(buf, 1));
        Assert.Throws<InvalidOperationException>(() => fsw.Flush());
    }
}
/// <summary>
/// Writes the page table items to persistent storage as a list of items.
/// </summary>
/// <param name="stream">data file</param>
/// <param name="manager">object that keeps track of free pages in the file</param>
/// <param name="freedPages">list of pages to be freed when the transaction commits</param>
/// <returns>index of the first page storing the list</returns>
public int WritePageTableData(FileStreamWrapper stream, StoragePageManager manager, out List<int> freedPages)
{
    List<int> pageIdxList = null;

    // create the writer
    ListWriter<PageTableItem> writer = new ListWriter<PageTableItem>();
    writer.WriteList(stream, manager, this.pageTable, out pageIdxList);

    // update the list that stores the physical page idx
    freedPages = this.pageTableStoragePages;
    this.pageTableStoragePages = pageIdxList;

    // return the index of the first page
    return this.pageTableStoragePages[0];
}
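// Sketch of the copy-on-write handshake implied by the freedPages out-parameter: the new list
// lands on freshly allocated pages, and the pages that held the previous version are returned
// to the page manager only after the transaction commits. SetFreePages appears inside
// ReadPageManagerData; whether it is callable from here is an assumption, and Commit(...) is a
// hypothetical placeholder for persisting the new root atomically.
public void CheckpointPageTableSketch(FileStreamWrapper stream, StoragePageManager spaceMgr, StoragePageTable pageTable)
{
    List<int> freedPages = null;
    int root = pageTable.WritePageTableData(stream, spaceMgr, out freedPages);

    Commit(root);                        // hypothetical: make the new root durable

    spaceMgr.SetFreePages(freedPages);   // old pages become reusable only after the commit
}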
/// <summary>
/// Reads the list of data items whose head is stored at the page index provided.
/// </summary>
/// <param name="stream">data file to read from</param>
/// <param name="pageIdx">index of the first physical page storing the list</param>
/// <returns>returns the index of the first physical page we read data from</returns>
public int ReadPageManagerData(FileStreamWrapper stream, int pageIdx)
{
    List<int> itemList = null;
    List<int> pageIdxList = null;

    // create reader
    ListReader<int> reader = new ListReader<int>();
    reader.ReadList(stream, pageIdx, out itemList, out pageIdxList);

    // merge with current data
    this.SetFreePages(itemList);

    // update page index
    this.managerStoragePages = pageIdxList;

    return this.managerStoragePages[0];
}
public void SP_TestReadWritePage()
{
    string dataFile = "SP_TestData1.tpdb";
    if (File.Exists(dataFile))
    {
        File.Delete(dataFile);
    }

    TestData[] pageData =
    {
        new TestData { data = "Record_0", recordIdx = 0 },
        new TestData { data = "Record_1", recordIdx = 1 },
        new TestData { data = "Record_2", recordIdx = 2 }
    };

    int pageIndex = int.MinValue;

    // write the page
    using (FileStreamWrapper dataFileStream = FileStreamWrapper.CreateObject(dataFile))
    {
        // populate the page with some data
        StoragePage page = new StoragePage();
        AddRecords(page, pageData);

        // write the page to disk (pageIdx = -1 appends a new page)
        pageIndex = page.WritePageData(dataFileStream, -1);
    }

    // read the page
    using (FileStreamWrapper dataFileStream = FileStreamWrapper.CreateObject(dataFile))
    {
        // read page from file
        StoragePage page = new StoragePage();
        page.ReadPageData(dataFileStream, pageIndex);

        // validate the page data
        ReadRecords(page, pageData);
    }
}
public static bool IsFile(string path)
{
    StreamWrapper wrapper;
    if (StatInternalCheck(ref path, false, out wrapper))
    {
        string url;
        if (StatInternalTryCache(path, out url))
        {
            return ((FileModeFlags)statCache.st_mode & FileModeFlags.File) != 0;
        }

        // we can't just call File.Exists since we have to throw warnings
        // also we are not calling full stat(), it is slow
        return FileStreamWrapper.HandleNewFileSystemInfo(false, path, (p) => new FileInfo(p).Exists);
    }

    return false;
}
/// <summary>
/// Writes the free page list to persistent storage as a list of items.
/// </summary>
/// <param name="stream">data file to write to</param>
/// <returns>index of the first page storing the list</returns>
public int WritePageManagerData(FileStreamWrapper stream)
{
    lock (this.freePages)
    {
        this.writingSelf = true;

        // make the list of pages to write:
        // the manager's own previous storage pages, then a barrier, then the free pages
        this.freePages.Insert(0, SelfWriteBarrier);
        this.freePages.InsertRange(0, this.managerStoragePages);

        // create writer
        ListWriter<int> writer = new ListWriter<int>();
        writer.WriteList(stream, this, this.freePages, out this.managerStoragePages);

        this.writingSelf = false;
    }

    return this.managerStoragePages[0];
}
/// <summary>
/// Writes the index items to persistent storage as a list of items.
/// We handle deletes by setting the "value" in the map to null and
/// not writing any items whose value is null to persistent storage.
/// </summary>
/// <param name="stream">data file to write to</param>
/// <param name="manager">object that keeps track of free pages in the file</param>
/// <param name="freedPages">list of pages to be freed when the transaction commits</param>
/// <returns>index of the first page storing the list</returns>
public int WriteIndexData(FileStreamWrapper stream, StoragePageManager manager, out List<int> freedPages)
{
    List<int> pageIdxList = null;

    // create the writer
    ListWriter<IndexItem<T>> writer = new ListWriter<IndexItem<T>>();
    writer.WriteList(
        stream,
        manager,
        this.indexMap.Values.Where(c => c != null).ToList(),    // skip deleted (null) entries
        out pageIdxList);

    // update the list that stores the physical page idx
    freedPages = this.indexStoragePages;
    this.indexStoragePages = pageIdxList;

    // return the index of the first page
    return this.indexStoragePages[0];
}
public static int GetSize(string path)
{
    StreamWrapper wrapper;
    if (StatInternalCheck(ref path, false, out wrapper))
    {
        string url;
        if (StatInternalTryCache(path, out url))
        {
            return statCache.st_size;
        }

        // we are not calling full stat(), it is slow
        return FileStreamWrapper.HandleNewFileSystemInfo(-1, path, (p) => FileSystemUtils.FileSize(new FileInfo(p)));
    }

    return -1;

    //bool ok = StatInternal(path, false);
    //if (!ok) return -1;
    //return statCache.st_size;
}
/// <summary>
/// Reads the list of data items whose head is stored at the page index provided.
/// </summary>
/// <param name="stream">data file to read from</param>
/// <param name="pageIdx">index of the first physical page storing the list</param>
/// <returns>returns the index of the first physical page we read data from</returns>
public int ReadTransactionTableData(FileStreamWrapper stream, int pageIdx)
{
    List<TransItem> itemList = null;
    List<int> pageIdxList = null;

    // create reader
    ListReader<TransItem> reader = new ListReader<TransItem>();
    reader.ReadList(stream, pageIdx, out itemList, out pageIdxList);

    // clear the current data
    this.contextTable.Clear();
    foreach (TransItem item in itemList)
    {
        this.contextTable.Add(item.Transaction, item);
    }

    // update page index
    this.contextTableStoragePages = pageIdxList;

    // return index of the first page
    return this.contextTableStoragePages[0];
}
public int WritePageData(FileStreamWrapper stream, int pageIdx)
{
    if (null == stream)
    {
        throw new ArgumentNullException();
    }

    // get the page data
    byte[] dataBuffer = new byte[PageSize];
    this.WritePageData(dataBuffer);

    // write to the file stream; a negative or out-of-range index appends a new page at the end
    if (0 > pageIdx || stream.Length < pageIdx * PageSize)
    {
        pageIdx = (int)(stream.Length / PageSize);
    }

    stream.Seek(pageIdx * PageSize, SeekOrigin.Begin);
    stream.Write(dataBuffer, 0, dataBuffer.Length);

    return pageIdx;
}
[InlineData(10)]    // Internal buffer too small, forces a flush
public void WriteData(int internalBufferLength)
{
    string fileName = Path.GetTempFileName();
    byte[] bytesToWrite = Encoding.Unicode.GetBytes("hello");

    try
    {
        // If:
        // ... I have a file stream that has been initialized
        // ... And I write some bytes to it
        using (FileStreamWrapper fsw = new FileStreamWrapper())
        {
            fsw.Init(fileName, internalBufferLength, FileAccess.ReadWrite);
            int bytesWritten = fsw.WriteData(bytesToWrite, bytesToWrite.Length);
            Assert.Equal(bytesToWrite.Length, bytesWritten);
        }

        // Then:
        // ... The file I wrote to should contain only the bytes I wrote out
        using (FileStream fs = File.OpenRead(fileName))
        {
            byte[] readBackBytes = new byte[1024];
            int bytesRead = fs.Read(readBackBytes, 0, readBackBytes.Length);

            // If bytes read is not equal, then more or less of the original string was written to the file
            Assert.Equal(bytesToWrite.Length, bytesRead);
            Assert.True(bytesToWrite.SequenceEqual(readBackBytes.Take(bytesRead)));
        }
    }
    finally
    {
        // Cleanup:
        // ... Delete the test file
        CleanupTestFile(fileName);
    }
}
public static bool IsDirectory(string path)
{
    StreamWrapper wrapper;
    if (!string.IsNullOrEmpty(path) && StatInternalCheck(ref path, false, out wrapper))    // do not throw warning if path is null or empty
    {
        string url;
        if (StatInternalTryCache(path, out url))
        {
            return ((FileModeFlags)statCache.st_mode & FileModeFlags.Directory) != 0;
        }

        // we can't just call Directory.Exists since we have to throw warnings
        // also we are not calling full stat(), it is slow
        return FileStreamWrapper.HandleNewFileSystemInfo(false, path, (p) => new DirectoryInfo(p).Exists);
    }

    return false;

    //bool ok = !string.IsNullOrEmpty(path) && StatInternal(path, false); // do not throw warning if path is null or empty
    //if (!ok) return false;
    //return ((FileModeFlags)statCache.st_mode & FileModeFlags.Directory) > 0;
}
public HttpResponseMessage MiniDump(int id, int dumpType = 0, string format = null)
{
    using (_tracer.Step("ProcessController.MiniDump"))
    {
        DumpFormat dumpFormat = ParseDumpFormat(format, DumpFormat.Raw);
        if (dumpFormat != DumpFormat.Raw && dumpFormat != DumpFormat.Zip)
        {
            return Request.CreateErrorResponse(HttpStatusCode.BadRequest,
                String.Format(CultureInfo.CurrentCulture, Resources.Error_DumpFormatNotSupported, dumpFormat));
        }

        string sitePolicy = _settings.GetWebSitePolicy();
        if ((MINIDUMP_TYPE)dumpType == MINIDUMP_TYPE.WithFullMemory && sitePolicy.Equals(FreeSitePolicy, StringComparison.OrdinalIgnoreCase))
        {
            return Request.CreateErrorResponse(HttpStatusCode.InternalServerError,
                String.Format(CultureInfo.CurrentCulture, Resources.Error_FullMiniDumpNotSupported, sitePolicy));
        }

        var process = GetProcessById(id);

        string dumpFile = Path.Combine(_environment.LogFilesPath, "minidump", "minidump.dmp");
        FileSystemHelpers.EnsureDirectory(_fileSystem, Path.GetDirectoryName(dumpFile));
        FileSystemHelpers.DeleteFileSafe(_fileSystem, dumpFile);

        try
        {
            using (_tracer.Step(String.Format("MiniDump pid={0}, name={1}, file={2}", process.Id, process.ProcessName, dumpFile)))
            {
                process.MiniDump(dumpFile, (MINIDUMP_TYPE)dumpType);
                _tracer.Trace("MiniDump size={0}", new FileInfo(dumpFile).Length);
            }
        }
        catch (Exception ex)
        {
            _tracer.TraceError(ex);
            FileSystemHelpers.DeleteFileSafe(_fileSystem, dumpFile);
            return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, ex.Message);
        }

        if (dumpFormat == DumpFormat.Raw)
        {
            string responseFileName = GetResponseFileName(process.ProcessName, "dmp");

            HttpResponseMessage response = Request.CreateResponse();
            response.Content = new StreamContent(FileStreamWrapper.OpenRead(dumpFile, _fileSystem));
            response.Content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
            response.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment");
            response.Content.Headers.ContentDisposition.FileName = responseFileName;
            return response;
        }
        else if (dumpFormat == DumpFormat.Zip)
        {
            string responseFileName = GetResponseFileName(process.ProcessName, "zip");

            HttpResponseMessage response = Request.CreateResponse();
            response.Content = ZipStreamContent.Create(responseFileName, _tracer, zip =>
            {
                try
                {
                    zip.AddFile(dumpFile, String.Empty);
                }
                finally
                {
                    FileSystemHelpers.DeleteFileSafe(_fileSystem, dumpFile);
                }

                // include the debugger helper binaries needed to analyze the dump, if present
                foreach (var fileName in new[] { "sos.dll", "mscordacwks.dll" })
                {
                    string filePath = Path.Combine(ProcessExtensions.ClrRuntimeDirectory, fileName);
                    if (_fileSystem.File.Exists(filePath))
                    {
                        zip.AddFile(filePath, String.Empty);
                    }
                }
            });
            return response;
        }
        else
        {
            return Request.CreateErrorResponse(HttpStatusCode.BadRequest,
                String.Format(CultureInfo.CurrentCulture, Resources.Error_DumpFormatNotSupported, dumpFormat));
        }
    }
}
/// <summary>
/// Writes the transaction table items to persistent storage as a list of items.
/// </summary>
/// <param name="stream">data file to write to</param>
/// <param name="manager">object that keeps track of free pages in the file</param>
/// <param name="freedPages">list of pages to be freed when the transaction commits</param>
/// <returns>index of the first page storing the list</returns>
public int WriteTransactionTableData(FileStreamWrapper stream, StoragePageManager manager, out List<int> freedPages)
{
    List<int> pageIdxList = null;

    // create the writer
    ListWriter<TransItem> writer = new ListWriter<TransItem>();
    writer.WriteList(stream, manager, this.contextTable.Values.ToList(), out pageIdxList);

    // update the list that stores the physical page idx
    freedPages = this.contextTableStoragePages;
    this.contextTableStoragePages = pageIdxList;

    // return the index of the first page
    return this.contextTableStoragePages[0];
}