public void InsertRun(DataRun existingRun, DataRun newRun)
        {
            int idx = _dataRuns.IndexOf(existingRun);

            if (idx < 0)
            {
                throw new ArgumentException("Attempt to replace non-existant run", "existingRun");
            }

            _dataRuns.Insert(idx + 1, newRun);
        }
        public void ReplaceRun(DataRun oldRun, DataRun newRun)
        {
            int idx = _dataRuns.IndexOf(oldRun);

            if (idx < 0)
            {
                throw new ArgumentException("Attempt to replace non-existant run", "oldRun");
            }

            _dataRuns[idx] = newRun;
        }
        public int RemoveRun(DataRun run)
        {
            int idx = _dataRuns.IndexOf(run);

            if (idx < 0)
            {
                throw new ArgumentException("Attempt to remove non-existant run", "run");
            }

            _dataRuns.RemoveAt(idx);
            return idx;
        }
        public AttributeRecord Split(int suggestedSplitIdx)
        {
            int splitIdx;

            if (suggestedSplitIdx < 0 || suggestedSplitIdx >= _dataRuns.Count)
            {
                splitIdx = _dataRuns.Count / 2;
            }
            else
            {
                splitIdx = suggestedSplitIdx;
            }

            long splitVcn = (long)_startingVCN;
            long splitLcn = 0;

            for (int i = 0; i < splitIdx; ++i)
            {
                splitVcn += _dataRuns[i].RunLength;
                splitLcn += _dataRuns[i].RunOffset;
            }

            List<DataRun> newRecordRuns = new List<DataRun>();

            while (_dataRuns.Count > splitIdx)
            {
                DataRun run = _dataRuns[splitIdx];

                _dataRuns.RemoveAt(splitIdx);
                newRecordRuns.Add(run);
            }

            // Each extent has implicit start LCN=0, so have to make stored runs match reality.
            // However, take care not to stomp on 'sparse' runs that may be at the start of the
            // new extent (indicated by Zero run offset).
            for (int i = 0; i < newRecordRuns.Count; ++i)
            {
                if (newRecordRuns[i].RunOffset != 0)
                {
                    newRecordRuns[i].RunOffset += splitLcn;
                    break;
                }
            }

            _lastVCN = (ulong)splitVcn - 1;

            return new NonResidentAttributeRecord(_type, _name, 0, _flags, splitVcn, newRecordRuns);
        }
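
The offset fix-up above is needed because data runs store each run's LCN as a delta from the previous run, and every extent's delta chain implicitly starts at zero. A minimal sketch of that re-basing step, using a hypothetical SimpleRun type rather than the real DataRun class:

using System;
using System.Collections.Generic;

// Hypothetical stand-in for DataRun: RunOffset is a delta from the previous
// run's LCN, and a zero offset marks a sparse run.
class SimpleRun
{
    public long RunLength;
    public long RunOffset;
}

static class SplitDemo
{
    // Re-base the first non-sparse run of a new extent so its delta is relative
    // to the extent's implicit starting LCN of zero.
    static void RebaseFirstRealRun(List<SimpleRun> newExtentRuns, long splitLcn)
    {
        foreach (SimpleRun run in newExtentRuns)
        {
            if (run.RunOffset != 0)      // leave leading sparse runs untouched
            {
                run.RunOffset += splitLcn;
                break;                   // only the first real run needs adjusting
            }
        }
    }

    static void Main()
    {
        // Runs that move to the new extent: a sparse run, then a real run at delta +5.
        var newExtent = new List<SimpleRun>
        {
            new SimpleRun { RunLength = 4, RunOffset = 0 },
            new SimpleRun { RunLength = 8, RunOffset = 5 },
        };

        RebaseFirstRealRun(newExtent, splitLcn: 100);
        Console.WriteLine(newExtent[1].RunOffset);   // prints 105
    }
}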
Example #5
        public CookedDataRun(DataRun raw, long startVcn, long prevLcn, NonResidentAttributeRecord attributeExtent)
        {
            _raw             = raw;
            _startVcn        = startVcn;
            _startLcn        = prevLcn + raw.RunOffset;
            _attributeExtent = attributeExtent;

            if (startVcn < 0)
            {
                throw new ArgumentOutOfRangeException("startVcn", startVcn, "VCN must be >= 0");
            }

            if (_startLcn < 0)
            {
                throw new ArgumentOutOfRangeException("prevLcn", prevLcn, "LCN must be >= 0");
            }
        }
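
For context: a raw run's RunOffset is a delta from the previous run's starting LCN, so "cooking" a run means accumulating those deltas into absolute LCNs, exactly the prevLcn + raw.RunOffset arithmetic above. A small, self-contained illustration with made-up values (not the real DiscUtils types):

using System;

static class CookedRunDemo
{
    static void Main()
    {
        // (length, offsetDelta) pairs as they might appear in a raw run list;
        // deltas can be negative because a run may sit before its predecessor on disk.
        var rawRuns = new (long Length, long OffsetDelta)[]
        {
            (16, 1000),   // absolute LCN 1000
            (8,  -200),   // 1000 + (-200) = 800
            (32, 450),    // 800 + 450 = 1250
        };

        long prevLcn = 0;
        long startVcn = 0;
        foreach (var run in rawRuns)
        {
            long startLcn = prevLcn + run.OffsetDelta;   // same arithmetic as the constructor above
            Console.WriteLine($"VCN {startVcn}..{startVcn + run.Length - 1} -> LCN {startLcn}");
            prevLcn = startLcn;
            startVcn += run.Length;
        }
    }
}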
Example #6
        public override void ExpandToClusters(long numVirtualClusters, NonResidentAttributeRecord extent, bool allocate)
        {
            long totalVirtualClusters = _cookedRuns.NextVirtualCluster;

            if (totalVirtualClusters < numVirtualClusters)
            {
                NonResidentAttributeRecord realExtent = extent;
                if (realExtent == null)
                {
                    realExtent = _cookedRuns.Last.AttributeExtent;
                }

                DataRun newRun = new DataRun(0, numVirtualClusters - totalVirtualClusters, true);
                realExtent.DataRuns.Add(newRun);
                _cookedRuns.Append(newRun, extent);
                realExtent.LastVcn = numVirtualClusters - 1;
            }

            if (allocate)
            {
                AllocateClusters(totalVirtualClusters, (int)(numVirtualClusters - totalVirtualClusters));
            }
        }
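
The key idea above is that growing an attribute's virtual size only requires appending a sparse run (one with no backing clusters); physical allocation is a separate step taken when allocate is true. A rough sketch of the same idea with a hypothetical Run type:

using System;
using System.Collections.Generic;

// Hypothetical run type: Length in clusters; IsSparse means no clusters are allocated yet.
class Run
{
    public long Length;
    public bool IsSparse;
}

static class ExpandDemo
{
    // Grow the virtual cluster count by appending a single sparse run;
    // real clusters can be allocated over that range later.
    static void ExpandTo(List<Run> runs, long currentClusters, long targetClusters)
    {
        if (currentClusters < targetClusters)
        {
            runs.Add(new Run { Length = targetClusters - currentClusters, IsSparse = true });
        }
    }

    static void Main()
    {
        var runs = new List<Run> { new Run { Length = 10, IsSparse = false } };
        ExpandTo(runs, currentClusters: 10, targetClusters: 25);
        Console.WriteLine(runs[1].Length);   // 15 sparse clusters appended
    }
}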
        protected override void Read(byte[] buffer, int offset, out int length)
        {
            _dataRuns = null;

            base.Read(buffer, offset, out length);

            _startingVCN         = Utilities.ToUInt64LittleEndian(buffer, offset + 0x10);
            _lastVCN             = Utilities.ToUInt64LittleEndian(buffer, offset + 0x18);
            _dataRunsOffset      = Utilities.ToUInt16LittleEndian(buffer, offset + 0x20);
            _compressionUnitSize = Utilities.ToUInt16LittleEndian(buffer, offset + 0x22);
            _dataAllocatedSize   = Utilities.ToUInt64LittleEndian(buffer, offset + 0x28);
            _dataRealSize        = Utilities.ToUInt64LittleEndian(buffer, offset + 0x30);
            _initializedDataSize = Utilities.ToUInt64LittleEndian(buffer, offset + 0x38);
            if ((Flags & (AttributeFlags.Compressed | AttributeFlags.Sparse)) != 0 && _dataRunsOffset > 0x40)
            {
                _compressedSize = Utilities.ToUInt64LittleEndian(buffer, offset + 0x40);
            }

            _dataRuns = new List<DataRun>();
            int pos = _dataRunsOffset;

            while (pos < length)
            {
                DataRun run = new DataRun();
                int     len = run.Read(buffer, offset + pos);

                // Length 1 means there was only a header byte (i.e. terminator)
                if (len == 1)
                {
                    break;
                }

                _dataRuns.Add(run);
                pos += len;
            }
        }
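
The parsing loop relies on the NTFS run-list encoding: each run begins with a header byte whose low nibble gives the size in bytes of the run-length field and whose high nibble gives the size of the (signed) offset field, and a 0x00 header terminates the list, which is why a returned length of 1 means terminator. A standalone decoder sketch, independent of the DataRun class used above:

using System;

static class RunListDecoder
{
    // Decode a raw NTFS run list: the header nibbles give the byte counts of the
    // little-endian length and (signed) offset fields that follow.
    static void Decode(byte[] buffer, int pos)
    {
        while (buffer[pos] != 0)                          // 0x00 header = terminator
        {
            int lengthBytes = buffer[pos] & 0x0F;
            int offsetBytes = (buffer[pos] >> 4) & 0x0F;
            pos++;

            long length = 0;
            for (int i = 0; i < lengthBytes; ++i)
            {
                length |= (long)buffer[pos + i] << (8 * i);
            }
            pos += lengthBytes;

            long offset = 0;
            for (int i = 0; i < offsetBytes; ++i)
            {
                offset |= (long)buffer[pos + i] << (8 * i);
            }
            // Sign-extend: the offset is a delta from the previous run and may be negative.
            if (offsetBytes > 0 && offsetBytes < 8 && (buffer[pos + offsetBytes - 1] & 0x80) != 0)
            {
                offset |= -1L << (8 * offsetBytes);
            }
            pos += offsetBytes;

            Console.WriteLine($"length={length} clusters, offsetDelta={offset}");
        }
    }

    static void Main()
    {
        // 0x21 header: 1 length byte, 2 offset bytes -> 0x18 clusters at delta +0x0534; then terminator.
        Decode(new byte[] { 0x21, 0x18, 0x34, 0x05, 0x00 }, 0);
    }
}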
Example #8
        public void Append(DataRun rawRun, NonResidentAttributeRecord attributeExtent)
        {
            CookedDataRun last = Last;

            _runs.Add(new CookedDataRun(rawRun, NextVirtualCluster, last == null ? 0 : last.StartLcn, attributeExtent));
        }
        public void InsertRun(int index, DataRun newRun)
        {
            _dataRuns.Insert(index, newRun);
        }