public void MakeSparse(int index)
{
    if (index < _firstDirty)
    {
        _firstDirty = index;
    }

    if (index > _lastDirty)
    {
        _lastDirty = index;
    }

    long prevLcn = index == 0 ? 0 : _runs[index - 1].StartLcn;
    CookedDataRun run = _runs[index];

    if (run.IsSparse)
    {
        throw new ArgumentException("Run is already sparse", "index");
    }

    // Replace the run with a sparse run of the same length, both in the cooked
    // view and in the owning attribute extent.
    _runs[index] = new CookedDataRun(new DataRun(0, run.Length, true), run.StartVcn, prevLcn, run.AttributeExtent);
    run.AttributeExtent.ReplaceRun(run.DataRun, _runs[index].DataRun);

    // Raw run offsets are relative to the previous non-sparse run, so the next
    // non-sparse run must absorb the LCN delta of the run that just went sparse.
    for (int i = index + 1; i < _runs.Count; ++i)
    {
        if (!_runs[i].IsSparse)
        {
            _runs[i].DataRun.RunOffset += run.StartLcn - prevLcn;
            break;
        }
    }
}
public int FindDataRun(long vcn, int startIdx)
{
    int numRuns = _runs.Count;
    if (numRuns > 0)
    {
        CookedDataRun run = _runs[numRuns - 1];
        if (vcn >= run.StartVcn)
        {
            if (run.StartVcn + run.Length > vcn)
            {
                return numRuns - 1;
            }

            throw new IOException("Looking for VCN outside of data runs");
        }

        for (int i = startIdx; i < numRuns; ++i)
        {
            run = _runs[i];
            if (run.StartVcn + run.Length > vcn)
            {
                return i;
            }
        }
    }

    throw new IOException("Looking for VCN outside of data runs");
}
public override int WriteClusters(long startVcn, int count, byte[] buffer, int offset)
{
    if (buffer.Length < (count * _bytesPerCluster) + offset)
    {
        throw new ArgumentException("Cluster buffer too small", "buffer");
    }

    int runIdx = 0;
    int totalWritten = 0;
    while (totalWritten < count)
    {
        long focusVcn = startVcn + totalWritten;

        runIdx = _cookedRuns.FindDataRun(focusVcn, runIdx);
        CookedDataRun run = _cookedRuns[runIdx];

        if (run.IsSparse)
        {
            throw new NotImplementedException("Writing to sparse datarun");
        }

        int toWrite = (int)Math.Min(count - totalWritten, run.Length - (focusVcn - run.StartVcn));

        long lcn = _cookedRuns[runIdx].StartLcn + (focusVcn - run.StartVcn);
        _fsStream.Position = lcn * _bytesPerCluster;
        _fsStream.Write(buffer, offset + (totalWritten * _bytesPerCluster), toWrite * _bytesPerCluster);

        totalWritten += toWrite;
    }

    return 0;
}
public void MakeNonSparse(int index, IEnumerable<DataRun> rawRuns)
{
    if (index < _firstDirty)
    {
        _firstDirty = index;
    }

    if (index > _lastDirty)
    {
        _lastDirty = index;
    }

    long prevLcn = index == 0 ? 0 : _runs[index - 1].StartLcn;
    CookedDataRun run = _runs[index];

    if (!run.IsSparse)
    {
        throw new ArgumentException("Run is already non-sparse", "index");
    }

    _runs.RemoveAt(index);
    int insertIdx = run.AttributeExtent.RemoveRun(run.DataRun);

    // Splice the replacement runs into both the cooked view and the attribute
    // extent, accumulating the absolute VCN/LCN as each relative run is consumed.
    CookedDataRun lastNewRun = null;
    long lcn = prevLcn;
    long vcn = run.StartVcn;
    foreach (var rawRun in rawRuns)
    {
        CookedDataRun newRun = new CookedDataRun(rawRun, vcn, lcn, run.AttributeExtent);
        _runs.Insert(index, newRun);
        run.AttributeExtent.InsertRun(insertIdx, rawRun);

        vcn += rawRun.RunLength;
        lcn += rawRun.RunOffset;
        lastNewRun = newRun;
        insertIdx++;
        index++;
    }

    // Re-base the runs that follow: sparse runs inherit the last new run's LCN,
    // and the first non-sparse run's offset becomes relative to it.
    for (int i = index; i < _runs.Count; ++i)
    {
        if (_runs[i].IsSparse)
        {
            _runs[i].StartLcn = lastNewRun.StartLcn;
        }
        else
        {
            _runs[i].DataRun.RunOffset = _runs[i].StartLcn - lastNewRun.StartLcn;
            break;
        }
    }
}
public int ReleaseClusters(long startVcn, int count)
{
    int runIdx = 0;
    int totalReleased = 0;

    long focus = startVcn;
    while (focus < startVcn + count)
    {
        runIdx = _cookedRuns.FindDataRun(focus, runIdx);
        CookedDataRun run = _cookedRuns[runIdx];

        if (run.IsSparse)
        {
            focus += run.Length;
        }
        else
        {
            // Trim the run so it covers exactly the clusters being released,
            // then free them in the bitmap and mark the run sparse.
            if (focus != run.StartVcn)
            {
                _cookedRuns.SplitRun(runIdx, focus);
                runIdx++;
                run = _cookedRuns[runIdx];
            }

            long numClusters = Math.Min((startVcn + count) - focus, run.Length);
            if (numClusters != run.Length)
            {
                _cookedRuns.SplitRun(runIdx, focus + numClusters);
                run = _cookedRuns[runIdx];
            }

            _context.ClusterBitmap.FreeClusters(new Range<long, long>(run.StartLcn, run.Length));
            _cookedRuns.MakeSparse(runIdx);
            totalReleased += (int)run.Length;

            focus += numClusters;
        }
    }

    return totalReleased;
}
public bool AreAllClustersStored(long vcn, int count)
{
    int runIdx = 0;
    long focusVcn = vcn;
    while (focusVcn < vcn + count)
    {
        runIdx = _cookedRuns.FindDataRun(focusVcn, runIdx);

        CookedDataRun run = _cookedRuns[runIdx];
        if (run.IsSparse)
        {
            return false;
        }

        focusVcn = run.StartVcn + run.Length;
    }

    return true;
}
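// Illustrative sketch (not part of the original source): one way a caller might
// combine AreAllClustersStored and AllocateClusters so a VCN range is fully backed
// before WriteClusters is used on it. The helper name EnsureClustersStored and its
// placement on this class are assumptions made for illustration only.
private void EnsureClustersStored(long startVcn, int count)
{
    if (!AreAllClustersStored(startVcn, count))
    {
        // Back any sparse portions of the range with real clusters.
        AllocateClusters(startVcn, count);
    }
}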
public void SplitRun(int runIdx, long vcn)
{
    if (runIdx < _firstDirty)
    {
        _firstDirty = runIdx;
    }

    if (runIdx > _lastDirty)
    {
        _lastDirty = runIdx;
    }

    CookedDataRun run = _runs[runIdx];

    if (run.StartVcn >= vcn || run.StartVcn + run.Length <= vcn)
    {
        throw new ArgumentException("Attempt to split run outside of its range", "vcn");
    }

    long distance = vcn - run.StartVcn;
    long offset = run.IsSparse ? 0 : distance;
    CookedDataRun newRun = new CookedDataRun(new DataRun(offset, run.Length - distance, run.IsSparse), vcn, run.StartLcn, run.AttributeExtent);

    run.Length = distance;

    _runs.Insert(runIdx + 1, newRun);
    run.AttributeExtent.InsertRun(run.DataRun, newRun.DataRun);

    // The new run becomes the reference point for the relative offsets that follow,
    // so shift sparse LCNs forward and re-base the first non-sparse run.
    for (int i = runIdx + 2; i < _runs.Count; ++i)
    {
        if (_runs[i].IsSparse)
        {
            _runs[i].StartLcn += offset;
        }
        else
        {
            _runs[i].DataRun.RunOffset -= offset;
            break;
        }
    }
}
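// Worked example (illustrative, not from the original source): splitting a
// non-sparse run with StartVcn = 10, Length = 8, StartLcn = 100 at vcn = 13 gives
// distance = offset = 3. The original run keeps VCNs 10-12 at LCN 100, the new run
// covers VCNs 13-17 starting at LCN 103, and the next non-sparse run's RunOffset
// shrinks by 3 because it is now measured from the new run rather than the old one.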
public override void ReadClusters(long startVcn, int count, byte[] buffer, int offset)
{
    if (buffer.Length < (count * _bytesPerCluster) + offset)
    {
        throw new ArgumentException("Cluster buffer too small", "buffer");
    }

    int runIdx = 0;
    int totalRead = 0;
    while (totalRead < count)
    {
        long focusVcn = startVcn + totalRead;

        runIdx = _cookedRuns.FindDataRun(focusVcn, runIdx);
        CookedDataRun run = _cookedRuns[runIdx];

        int toRead = (int)Math.Min(count - totalRead, run.Length - (focusVcn - run.StartVcn));

        if (run.IsSparse)
        {
            Array.Clear(buffer, offset + (totalRead * _bytesPerCluster), toRead * _bytesPerCluster);
        }
        else
        {
            long lcn = _cookedRuns[runIdx].StartLcn + (focusVcn - run.StartVcn);
            _fsStream.Position = lcn * _bytesPerCluster;
            int numRead = Utilities.ReadFully(_fsStream, buffer, offset + (totalRead * _bytesPerCluster), toRead * _bytesPerCluster);
            if (numRead != toRead * _bytesPerCluster)
            {
                throw new IOException(string.Format(CultureInfo.InvariantCulture, "Short read, reading {0} clusters starting at LCN {1}", toRead, lcn));
            }
        }

        totalRead += toRead;
    }
}
public void Append(DataRun rawRun, NonResidentAttributeRecord attributeExtent)
{
    CookedDataRun last = Last;
    _runs.Add(new CookedDataRun(rawRun, NextVirtualCluster, last == null ? 0 : last.StartLcn, attributeExtent));
}
public int AllocateClusters(long startVcn, int count)
{
    if (startVcn + count > _cookedRuns.NextVirtualCluster)
    {
        throw new IOException("Attempt to allocate unknown clusters");
    }

    int totalAllocated = 0;
    int runIdx = 0;

    long focus = startVcn;
    while (focus < startVcn + count)
    {
        runIdx = _cookedRuns.FindDataRun(focus, runIdx);
        CookedDataRun run = _cookedRuns[runIdx];

        if (run.IsSparse)
        {
            // Trim the sparse run so it covers exactly the clusters being allocated.
            if (focus != run.StartVcn)
            {
                _cookedRuns.SplitRun(runIdx, focus);
                runIdx++;
                run = _cookedRuns[runIdx];
            }

            long numClusters = Math.Min((startVcn + count) - focus, run.Length);
            if (numClusters != run.Length)
            {
                _cookedRuns.SplitRun(runIdx, focus + numClusters);
                run = _cookedRuns[runIdx];
            }

            // Hint the bitmap allocator to continue from the nearest preceding non-sparse run.
            long nextCluster = -1;
            for (int i = runIdx - 1; i >= 0; --i)
            {
                if (!_cookedRuns[i].IsSparse)
                {
                    nextCluster = _cookedRuns[i].StartLcn + _cookedRuns[i].Length;
                    break;
                }
            }

            var alloced = _context.ClusterBitmap.AllocateClusters(numClusters, nextCluster, _isMft, AllocatedClusterCount);

            // Convert the allocated extents into runs whose offsets are relative
            // to the previous run, then splice them in place of the sparse run.
            List<DataRun> runs = new List<DataRun>();
            long lcn = runIdx == 0 ? 0 : _cookedRuns[runIdx - 1].StartLcn;
            foreach (var allocation in alloced)
            {
                runs.Add(new DataRun(allocation.First - lcn, allocation.Second, false));
                lcn = allocation.First;
            }

            _cookedRuns.MakeNonSparse(runIdx, runs);

            totalAllocated += (int)numClusters;
            focus += numClusters;
        }
        else
        {
            focus = run.StartVcn + run.Length;
        }
    }

    return totalAllocated;
}
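// Illustrative sketch (not part of the original source): raw DataRun offsets are
// stored relative to the previous non-sparse run, while the cooked view holds
// absolute LCNs. Assuming DataRun exposes RunOffset, RunLength and an IsSparse
// flag, a hypothetical decoder would accumulate the deltas like this:
private static void PrintAbsoluteRuns(IEnumerable<DataRun> rawRuns)
{
    long vcn = 0;
    long lcn = 0;
    foreach (DataRun raw in rawRuns)
    {
        if (!raw.IsSparse)
        {
            lcn += raw.RunOffset;  // offsets are deltas from the previous real run
            Console.WriteLine("VCN {0} -> LCN {1} ({2} clusters)", vcn, lcn, raw.RunLength);
        }
        else
        {
            Console.WriteLine("VCN {0} -> sparse ({1} clusters)", vcn, raw.RunLength);
        }

        vcn += raw.RunLength;
    }
}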