// Persists a record locator and its passengers against a previously uploaded
// file, then returns the new record locator id as JSON (GET allowed).
public ActionResult SaveRecord(RecordLocatorPassenger record)
{
    var newRecordLocatorId = 0;
    using (var unitOfWork = new UnitOfWork(new Models.PassengerNameListDbContext()))
    {
        var uploadedFile = unitOfWork.UploadFileRepo.Get(record.FileId);

        // Save the locator first so its generated Id is available for the passengers.
        var locatorEntity = new RecordLocator
        {
            UploadFileId = record.FileId,
            Code = record.RecordLocator,
        };
        unitOfWork.RecordLocatorRepo.Add(locatorEntity);
        unitOfWork.Complete();

        foreach (var passengerName in record.Passengers)
        {
            unitOfWork.PassengerRepo.Add(new Passenger
            {
                Name = passengerName,
                RecordLocatorId = locatorEntity.Id,
            });
            // Mirror the addition into the stored file content (one appended line per passenger).
            uploadedFile.FileContent += Environment.NewLine + "1" + record.RecordLocator;
        }
        unitOfWork.Complete();
        newRecordLocatorId = locatorEntity.Id;
    }

    return new JsonResult
    {
        Data = new AddRecordResult { RecordLocatorId = newRecordLocatorId },
        JsonRequestBehavior = JsonRequestBehavior.AllowGet,
    };
}
// Exports only the NPC group of Oblivion.esm via a GroupMask, then verifies
// that every record present in the export is an NPC_ record.
public static async Task OblivionESM_GroupMask_Export(TestingSettings settings, Target target)
{
    var mod = OblivionMod.CreateFromBinary(
        new ModPath(
            Mutagen.Bethesda.Oblivion.Constants.Oblivion,
            Path.Combine(settings.DataFolderLocations.Oblivion, target.Path)));

    using var tempFolder = new TempFolder("Mutagen_Oblivion_Binary_GroupMask_Export");
    var exportPath = Path.Combine(tempFolder.Dir.Path, TestingConstants.OBLIVION_ESM);

    // Restrict the export to NPC records only.
    mod.WriteToBinary(
        exportPath,
        importMask: new GroupMask()
        {
            Npcs = true
        });

    // Scan every record written out and assert it is of type NPC_.
    var locations = RecordLocator.GetFileLocations(exportPath, GameRelease.Oblivion);
    using var stream = new BinaryReadStream(exportPath);
    foreach (var recordPosition in locations.ListedRecords.Keys)
    {
        stream.Position = recordPosition;
        var recordType = HeaderTranslation.ReadNextRecordType(stream);
        if (!recordType.Equals(Oblivion.Internals.RecordTypes.NPC_))
        {
            throw new ArgumentException("Exported a non-NPC record.");
        }
    }
}
/// <summary>
/// Copies a mod stream to the output with the records inside each base group
/// re-emitted in FormKey ID order.  Two streams over the same data are used:
/// one to locate groups, one to do the actual copying.
/// </summary>
/// <param name="streamCreator">A func to create an input stream</param>
/// <param name="outputStream">Stream to write output to</param>
/// <param name="release">Game release the mod data belongs to</param>
public static void Sort(
    Func<IMutagenReadStream> streamCreator,
    Stream outputStream,
    GameRelease release)
{
    using var inputStream = streamCreator();
    using var locatorStream = streamCreator();
    using var writer = new MutagenWriter(outputStream, release, dispose: false);
    while (!inputStream.Complete)
    {
        long noRecordLength;
        foreach (var grupLoc in RecordLocator.IterateBaseGroupLocations(locatorStream))
        {
            // Copy any bytes between the current position and the next group verbatim.
            noRecordLength = grupLoc.Value - inputStream.Position;
            inputStream.WriteTo(writer.BaseStream, (int)noRecordLength);
            // If complete overall, return
            if (inputStream.Complete)
            {
                return;
            }
            var groupMeta = inputStream.GetGroup();
            // Buffers each record's raw bytes (plus any immediately-following subgroup)
            // keyed by its FormKey, for re-emission in sorted order below.
            var storage = new Dictionary<FormKey, List<ReadOnlyMemorySlice<byte>>>();
            using (var grupFrame = new MutagenFrame(inputStream).SpawnWithLength(groupMeta.TotalLength))
            {
                // Copy the group header through unchanged.
                inputStream.WriteTo(writer.BaseStream, inputStream.MetaData.Constants.GroupConstants.HeaderLength);
                locatorStream.Position = grupLoc.Value;
                foreach (var rec in RecordLocator.ParseTopLevelGRUP(locatorStream))
                {
                    MajorRecordHeader majorMeta = inputStream.GetMajorRecord();
                    storage.GetOrAdd(rec.FormKey).Add(inputStream.ReadMemory(checked((int)majorMeta.TotalLength), readSafe: true));
                    if (grupFrame.Complete)
                    {
                        continue;
                    }
                    // A record may be followed by a subgroup; keep it attached to the
                    // same FormKey so it travels with its owning record when sorted.
                    if (inputStream.TryGetGroup(out var subGroupMeta))
                    {
                        storage.GetOrAdd(rec.FormKey).Add(inputStream.ReadMemory(checked((int)subGroupMeta.TotalLength), readSafe: true));
                    }
                }
            }
            // Sorts via Record ID (as opposed to just the first 6 bytes)
            foreach (var item in storage.OrderBy((i) => i.Key.ID))
            {
                foreach (var bytes in item.Value)
                {
                    writer.Write(bytes);
                }
            }
        }
        // Copy whatever trails the final group.
        inputStream.WriteTo(writer.BaseStream, (int)inputStream.Remaining);
    }
}
/// <summary>
/// Trims mod stream of uninteresting record types
/// </summary>
/// <param name="streamCreator">A func to create an input stream</param>
/// <param name="outputStream">Stream to write output to</param>
/// <param name="interest">Specification of which record types to include</param>
public static void Trim(
    Func<IMutagenReadStream> streamCreator,
    Stream outputStream,
    RecordInterest interest)
{
    using var inputStream = streamCreator();
    if (inputStream.Complete)
    {
        return;
    }
    // leaveOpen: the caller retains ownership of outputStream.
    using var writer = new System.IO.BinaryWriter(outputStream, Encoding.Default, leaveOpen: true);
    var fileLocs = RecordLocator.GetLocations(
        inputStream,
        interest: interest);
    // Import until first listed grup
    inputStream.Position = 0;
    if (!fileLocs.GrupLocations.TryGetInDirection(
        inputStream.Position,
        higher: true,
        result: out var nextRec))
    {
        // No groups at all: nothing to trim, nothing copied.
        return;
    }
    // NOTE(review): this indexes ListedRecords by a key obtained from GrupLocations —
    // assumes the two sorted collections line up; confirm against RecordLocator's contract.
    var recordLocation = fileLocs.ListedRecords.Keys[nextRec.Key];
    var noRecordLength = recordLocation - inputStream.Position - inputStream.MetaData.Constants.GroupConstants.HeaderLength;
    inputStream.WriteTo(outputStream, (int)noRecordLength);
    // Copy interesting groups whole; skip over uninteresting ones entirely.
    while (!inputStream.Complete)
    {
        var groupMeta = inputStream.GetGroup(readSafe: true);
        if (interest.IsInterested(groupMeta.ContainedRecordType))
        {
            inputStream.WriteTo(outputStream, checked((int)groupMeta.TotalLength));
        }
        else
        {
            inputStream.Position += groupMeta.TotalLength;
        }
    }
}
/// <summary>
/// Checks whether the given FormID belongs to a record of type <typeparamref name="T"/>.
/// The FormID set for each queried type is computed once via a RecordLocator pass
/// and cached for subsequent lookups.
/// </summary>
public bool IsOfRecordType<T>(FormID formID)
    where T : IMajorRecordCommonGetter
{
    if (formID == FormID.Null)
    {
        return false;
    }
    lock (_cachedLocs)
    {
        // Fast path: this type has already been scanned.
        if (_cachedLocs.TryGetValue(typeof(T), out var known))
        {
            return known.Contains(formID);
        }
        // Slow path: scan the file for all records of T and cache their FormIDs.
        using var stream = _streamCreator();
        var locations = RecordLocator.GetFileLocations(
            stream,
            new RecordInterest(
                interestingTypes: UtilityTranslation.GetRecordType<T>()));
        var ids = locations.FormIDs.ToHashSet();
        _cachedLocs.Add(typeof(T), ids);
        return ids.Contains(formID);
    }
}
/// <summary>
/// Runs the alignment pipeline over a mod file: aligns major-record subrecords
/// per the given rules, aligns group order, and (for Oblivion) additionally
/// reorders CELL and WRLD child groups.  Intermediate passes are written to
/// files inside <paramref name="temp"/>.
/// </summary>
/// <param name="inputPath">Mod file to align</param>
/// <param name="outputPath">Destination for the fully aligned file</param>
/// <param name="gameMode">Game release the mod data belongs to</param>
/// <param name="alignmentRules">Rules describing desired subrecord/group order</param>
/// <param name="temp">Directory for intermediate pass files</param>
public static void Align(
    ModPath inputPath,
    FilePath outputPath,
    GameRelease gameMode,
    AlignmentRules alignmentRules,
    DirectoryPath temp)
{
    var interest = new RecordInterest(alignmentRules.Alignments.Keys)
    {
        EmptyMeansInterested = false
    };
    // Always interested in parent record types
    interest.InterestingTypes.Add("CELL");
    interest.InterestingTypes.Add("WRLD");
    var fileLocs = RecordLocator.GetLocations(inputPath, gameMode, interest);
    if (gameMode == GameRelease.Oblivion)
    {
        // Pass 1: align subrecords within major records.
        var alignedMajorRecordsFile = new ModPath(inputPath.ModKey, Path.Combine(temp, "alignedRules"));
        using (var inputStream = new MutagenBinaryReadStream(inputPath, gameMode))
        {
            using var writer = new MutagenWriter(new FileStream(alignedMajorRecordsFile, FileMode.Create), gameMode);
            AlignMajorRecordsByRules(inputStream, writer, alignmentRules, fileLocs);
        }
        // Pass 2: align group ordering.
        var alignedGroupsFile = new ModPath(inputPath.ModKey, Path.Combine(temp, "alignedGroups"));
        using (var inputStream = new MutagenBinaryReadStream(alignedMajorRecordsFile, gameMode))
        {
            using var writer = new MutagenWriter(new FileStream(alignedGroupsFile, FileMode.Create), gameMode);
            AlignGroupsByRules(inputStream, writer, alignmentRules, fileLocs);
        }
        // Locations shift after each pass; re-scan before the next one.
        fileLocs = RecordLocator.GetLocations(alignedGroupsFile, gameMode, interest);
        // Pass 3: align cell children.
        var alignedCellsFile = new ModPath(inputPath.ModKey, Path.Combine(temp, "alignedCells"));
        using (var mutaReader = new BinaryReadStream(alignedGroupsFile))
        {
            using var writer = new MutagenWriter(alignedCellsFile, gameMode);
            foreach (var grup in fileLocs.GrupLocations.Keys)
            {
                if (grup <= mutaReader.Position)
                {
                    continue;
                }
                var noRecordLength = grup - mutaReader.Position;
                mutaReader.WriteTo(writer.BaseStream, (int)noRecordLength);
                // If complete overall, return
                if (mutaReader.Complete)
                {
                    break;
                }
                // Copy the first 12 bytes of the group header, then inspect its type.
                mutaReader.WriteTo(writer.BaseStream, 12);
                var grupType = mutaReader.ReadInt32();
                writer.Write(grupType);
                if (writer.MetaData.Constants.GroupConstants.Cell.TopGroupType == grupType)
                {
                    AlignCellChildren(mutaReader, writer);
                }
            }
            mutaReader.WriteTo(writer.BaseStream, checked((int)mutaReader.Remaining));
        }
        fileLocs = RecordLocator.GetLocations(alignedCellsFile, gameMode, interest);
        // Pass 4: align world children, writing the final output.
        using (var mutaReader = new MutagenBinaryReadStream(alignedCellsFile, gameMode))
        {
            using var writer = new MutagenWriter(outputPath.Path, gameMode);
            foreach (var grup in fileLocs.GrupLocations.Keys)
            {
                if (grup <= mutaReader.Position)
                {
                    continue;
                }
                var noRecordLength = grup - mutaReader.Position;
                mutaReader.WriteTo(writer.BaseStream, (int)noRecordLength);
                // If complete overall, return
                if (mutaReader.Complete)
                {
                    break;
                }
                mutaReader.WriteTo(writer.BaseStream, 12);
                var grupType = mutaReader.ReadInt32();
                writer.Write(grupType);
                if (writer.MetaData.Constants.GroupConstants.World.TopGroupType == grupType)
                {
                    AlignWorldChildren(mutaReader, writer);
                }
            }
            mutaReader.WriteTo(writer.BaseStream, checked((int)mutaReader.Remaining));
        }
    }
    else
    {
        // Non-Oblivion games: only the rule and group passes are needed.
        var alignedMajorRecordsFile = new ModPath(inputPath.ModKey, Path.Combine(temp, "alignedRules"));
        using (var inputStream = new MutagenBinaryReadStream(inputPath, gameMode))
        {
            using var writer = new MutagenWriter(alignedMajorRecordsFile, gameMode);
            AlignMajorRecordsByRules(inputStream, writer, alignmentRules, fileLocs);
        }
        // Fixed: removed dead local ("alignedGroupsFile") — it was computed but never
        // used; this branch writes the group pass straight to outputPath.
        using (var inputStream = new MutagenBinaryReadStream(alignedMajorRecordsFile, gameMode))
        {
            using var writer = new MutagenWriter(new FileStream(outputPath.Path, FileMode.Create), gameMode);
            AlignGroupsByRules(inputStream, writer, alignmentRules, fileLocs);
        }
    }
}
/// <summary>
/// Decompresses mod stream into an output.
/// Will open up two input streams, so a Func factory is given as input.
/// </summary>
/// <param name="streamCreator">A func to create an input stream</param>
/// <param name="outputStream">Stream to write output to</param>
/// <param name="interest">Optional specification of which record types to process</param>
public static void Decompress(
    Func<IMutagenReadStream> streamCreator,
    Stream outputStream,
    RecordInterest? interest = null)
{
    using var inputStream = streamCreator();
    // Second stream used to jump back and read group lengths without
    // disturbing the main copy position.
    using var inputStreamJumpback = streamCreator();
    // leaveOpen: the caller retains ownership of outputStream.
    using var writer = new System.IO.BinaryWriter(outputStream, Encoding.Default, leaveOpen: true);
    // Accumulated growth of the output relative to the input, from records
    // decompressed so far; used to relocate group-length fields.
    long runningDiff = 0;
    // Only records flagged compressed are of interest.
    var fileLocs = RecordLocator.GetFileLocations(
        inputStream,
        interest: interest,
        additionalCriteria: (stream, recType, len) =>
        {
            return (stream.GetMajorRecord().IsCompressed);
        });
    // Construct group length container for later use
    // Key: group location in the input.  Length: updated group length.
    // Offset: runningDiff at the time the group was first touched (where its
    // length field now lives in the output).
    Dictionary<long, (uint Length, long Offset)> grupMeta = new Dictionary<long, (uint Length, long Offset)>();
    inputStream.Position = 0;
    while (!inputStream.Complete)
    {
        // Import until next listed major record
        long noRecordLength;
        if (fileLocs.ListedRecords.TryGetInDirection(
            inputStream.Position,
            higher: true,
            result: out var nextRec))
        {
            var recordLocation = fileLocs.ListedRecords.Keys[nextRec.Key];
            noRecordLength = recordLocation - inputStream.Position;
        }
        else
        {
            // No more compressed records: copy everything remaining.
            noRecordLength = inputStream.Length - inputStream.Position;
        }
        inputStream.WriteTo(outputStream, (int)noRecordLength);
        // If complete overall, return
        if (inputStream.Complete)
        {
            break;
        }
        var majorMeta = inputStream.ReadMajorRecord(readSafe: true);
        var len = majorMeta.ContentLength;
        using (var frame = MutagenFrame.ByLength(
            reader: inputStream,
            length: len))
        {
            // Decompress
            var decompressed = frame.Decompress();
            var decompressedLen = decompressed.TotalLength;
            var lengthDiff = decompressedLen - len;
            var majorMetaSpan = majorMeta.HeaderData.ToArray();
            // Write major Meta: clear the compressed flag and patch in the new length.
            var writableMajorMeta = inputStream.MetaData.Constants.MajorRecordWritable(majorMetaSpan.AsSpan());
            writableMajorMeta.IsCompressed = false;
            writableMajorMeta.ContentLength = (uint)(len + lengthDiff);
            writer.Write(majorMetaSpan);
            writer.Write(decompressed.ReadRemainingSpan(readSafe: false));
            // If no difference in lengths, move on
            if (lengthDiff == 0)
            {
                continue;
            }
            // Modify parent group lengths
            foreach (var grupLoc in fileLocs.GetContainingGroupLocations(nextRec.Value.FormKey))
            {
                // First touch: capture the group's original length from the input
                // and remember where its length field sits in the shifted output.
                if (!grupMeta.TryGetValue(grupLoc, out var loc))
                {
                    loc.Offset = runningDiff;
                    // +4 skips the 4-byte record type tag to reach the length field.
                    inputStreamJumpback.Position = grupLoc + 4;
                    loc.Length = inputStreamJumpback.ReadUInt32();
                }
                grupMeta[grupLoc] = ((uint)(loc.Length + lengthDiff), loc.Offset);
            }
            runningDiff += lengthDiff;
        }
    }
    // Patch all modified group lengths into the already-written output.
    foreach (var item in grupMeta)
    {
        var grupLoc = item.Key;
        outputStream.Position = grupLoc + 4 + item.Value.Offset;
        writer.Write(item.Value.Length);
    }
}
/// <summary>
/// Runs the alignment pipeline over a mod file: aligns major-record subrecords
/// per the given rules, aligns group order, then reorders CellChildren and
/// WorldChildren groups.  Intermediate passes are written into a temp folder.
/// </summary>
/// <param name="inputPath">Mod file to align</param>
/// <param name="outputPath">Destination for the fully aligned file</param>
/// <param name="release">Game release the mod data belongs to</param>
/// <param name="alignmentRules">Rules describing desired subrecord/group order</param>
/// <param name="temp">Optional temp folder; one is created (and disposed) if omitted</param>
public static void Align(
    ModPath inputPath,
    FilePath outputPath,
    GameRelease release,
    AlignmentRules alignmentRules,
    TempFolder? temp = null)
{
    var interest = new RecordInterest(alignmentRules.Alignments.Keys)
    {
        EmptyMeansInterested = false
    };
    var parsingBundle = new ParsingBundle(GameConstants.Get(release), MasterReferenceReader.FromPath(inputPath, release));
    var fileLocs = RecordLocator.GetFileLocations(inputPath.Path, release, interest);
    temp ??= new TempFolder();
    using (temp)
    {
        // Pass 1: align subrecords within major records.
        var alignedMajorRecordsFile = Path.Combine(temp.Dir.Path, "alignedRules");
        using (var inputStream = new MutagenBinaryReadStream(inputPath.Path, parsingBundle))
        {
            using var writer = new MutagenWriter(new FileStream(alignedMajorRecordsFile, FileMode.Create), release);
            AlignMajorRecordsByRules(inputStream, writer, alignmentRules, fileLocs);
        }
        // Pass 2: align group ordering.
        var alignedGroupsFile = Path.Combine(temp.Dir.Path, "alignedGroups");
        using (var inputStream = new MutagenBinaryReadStream(alignedMajorRecordsFile, parsingBundle))
        {
            using var writer = new MutagenWriter(new FileStream(alignedGroupsFile, FileMode.Create), release);
            AlignGroupsByRules(inputStream, writer, alignmentRules, fileLocs);
        }
        // Locations shift after each pass; re-scan before the next one.
        fileLocs = RecordLocator.GetFileLocations(alignedGroupsFile, release, interest);
        // Pass 3: align cell children.
        var alignedCellsFile = Path.Combine(temp.Dir.Path, "alignedCells");
        using (var mutaReader = new BinaryReadStream(alignedGroupsFile))
        {
            using var writer = new MutagenWriter(alignedCellsFile, release);
            foreach (var grup in fileLocs.GrupLocations)
            {
                if (grup <= mutaReader.Position)
                {
                    continue;
                }
                var noRecordLength = grup - mutaReader.Position;
                mutaReader.WriteTo(writer.BaseStream, (int)noRecordLength);
                // If complete overall, return
                if (mutaReader.Complete)
                {
                    break;
                }
                // Copy the first 12 bytes of the group header, then inspect its type.
                mutaReader.WriteTo(writer.BaseStream, 12);
                var grupType = (GroupTypeEnum)mutaReader.ReadUInt32();
                writer.Write((int)grupType);
                switch (grupType)
                {
                    case GroupTypeEnum.CellChildren:
                        AlignCellChildren(mutaReader, writer);
                        break;
                    default:
                        break;
                }
            }
            mutaReader.WriteTo(writer.BaseStream, checked((int)mutaReader.Remaining));
        }
        fileLocs = RecordLocator.GetFileLocations(alignedCellsFile, release, interest);
        // Pass 4: align world children, writing the final output.
        using (var mutaReader = new MutagenBinaryReadStream(alignedCellsFile, parsingBundle))
        {
            using var writer = new MutagenWriter(outputPath.Path, GameConstants.Get(release));
            foreach (var grup in fileLocs.GrupLocations)
            {
                if (grup <= mutaReader.Position)
                {
                    continue;
                }
                var noRecordLength = grup - mutaReader.Position;
                mutaReader.WriteTo(writer.BaseStream, (int)noRecordLength);
                // If complete overall, return
                if (mutaReader.Complete)
                {
                    break;
                }
                mutaReader.WriteTo(writer.BaseStream, 12);
                var grupType = (GroupTypeEnum)mutaReader.ReadUInt32();
                writer.Write((int)grupType);
                switch (grupType)
                {
                    case GroupTypeEnum.WorldChildren:
                        AlignWorldChildren(mutaReader, writer);
                        break;
                    default:
                        break;
                }
            }
            mutaReader.WriteTo(writer.BaseStream, checked((int)mutaReader.Remaining));
        }
    }
}
// Benchmark target: rewinds the shared stream and measures a full
// file-location scan over it.
public object GetFileLocations()
{
    stream.Position = 0;
    var locations = RecordLocator.GetFileLocations(stream);
    return locations;
}
// Benchmark target: rewinds the shared stream and measures construction of
// the base-group location enumerable.
public object BaseGRUPIterator()
{
    stream.Position = 0;
    var iterator = RecordLocator.IterateBaseGroupLocations(stream);
    return iterator;
}
/// <summary>
/// Merges duplicate top-level groups of the same contained record type into a
/// single group.  The first occurrence's position is where the merged group is
/// written; later duplicates contribute their contents and are skipped in place.
/// If no type has more than one top-level group, the input is copied verbatim.
/// </summary>
/// <param name="streamCreator">A func to create an input stream</param>
/// <param name="outputStream">Stream to write output to</param>
/// <param name="interest">Optional specification of which record types to process</param>
public static void MergeGroups(
    Func<IMutagenReadStream> streamCreator,
    Stream outputStream,
    RecordInterest? interest = null)
{
    using var inputStream = streamCreator();
    // leaveOpen: the caller retains ownership of outputStream.
    using var writer = new System.IO.BinaryWriter(outputStream, Encoding.Default, leaveOpen: true);
    // Fixed: removed unused locals ("runningDiff" accumulator and a second
    // jumpback stream) that appear to have been copied from Decompress and
    // were never referenced here.
    // NOTE(review): the IsCompressed additionalCriteria also looks inherited from
    // Decompress — confirm it is intentional for a group-merging pass.
    var fileLocs = RecordLocator.GetLocations(
        inputStream,
        interest: interest,
        additionalCriteria: (stream, recType, len) =>
        {
            return (stream.GetMajorRecord().IsCompressed);
        });
    inputStream.Position = 0;
    // Collect the locations of all top-level groups, keyed by contained record type.
    var dict = new Dictionary<RecordType, List<GroupLocationMarker>>();
    foreach (var loc in fileLocs.GrupLocations)
    {
        inputStream.Position = loc.Key;
        var group = inputStream.ReadGroup();
        if (!group.IsTopLevel)
        {
            continue;
        }
        dict.GetOrAdd(loc.Value.ContainedRecordType).Add(loc.Value);
    }
    // Only types with more than one top-level group need merging.
    foreach (var val in dict.ToList())
    {
        if (val.Value.Count <= 1)
        {
            dict.Remove(val.Key);
        }
    }
    if (dict.Count == 0)
    {
        // Nothing to merge: straight byte copy.
        inputStream.BaseStream.Position = 0;
        inputStream.BaseStream.CopyTo(outputStream);
        return;
    }
    inputStream.Position = 0;
    // Tracks record types whose merged group has already been emitted.
    var passedSet = new HashSet<RecordType>();
    while (!inputStream.Complete)
    {
        // Import until next listed group
        long noRecordLength;
        if (fileLocs.GrupLocations.TryGetInDirection(
            inputStream.Position,
            higher: true,
            result: out var nextRec))
        {
            noRecordLength = nextRec.Value.Location.Min - inputStream.Position;
        }
        else
        {
            noRecordLength = inputStream.Remaining;
        }
        inputStream.WriteTo(writer.BaseStream, (int)noRecordLength);
        if (inputStream.Complete)
        {
            break;
        }
        var groupMeta = inputStream.GetGroup();
        if (!dict.TryGetValue(groupMeta.ContainedRecordType, out var groupLocations))
        {
            // Not a duplicated type: copy the group through unchanged.
            inputStream.WriteTo(writer.BaseStream, checked((int)groupMeta.TotalLength));
            continue;
        }
        if (!passedSet.Add(groupMeta.ContainedRecordType))
        {
            // Already merged at its first occurrence: skip this duplicate.
            inputStream.Position += groupMeta.TotalLength;
            continue;
        }
        // Write last group header
        var readPos = inputStream.Position;
        var writePos = writer.BaseStream.Position;
        long totalLen = groupMeta.HeaderLength;
        inputStream.Position = groupLocations.Last().Location.Min;
        inputStream.WriteTo(writer.BaseStream, groupMeta.HeaderLength);
        // Write all group contents
        foreach (var groupLoc in groupLocations)
        {
            inputStream.Position = groupLoc.Location.Min;
            var targetGroupMeta = inputStream.GetGroupFrame(readSafe: false);
            totalLen += targetGroupMeta.Content.Length;
            writer.BaseStream.Write(targetGroupMeta.Content);
        }
        // Update group length
        writer.BaseStream.Position = writePos + 4;
        writer.Write(checked((uint)totalLen));
        // reset for next
        writer.BaseStream.Position = writePos + totalLen;
        inputStream.Position = readPos + groupMeta.TotalLength;
    }
}
// Accepts a passenger-list file upload, forwards it to the PassengerNameList API
// for parsing, persists the parsed record locators and passengers, and returns
// a JSON summary of the upload.
public async Task<ActionResult> UploadFile()
{
    var file = Request.Files[0];
    var apiUrl = ConfigurationManager.AppSettings["PassengerNameListApi.Url"];
    var fileContent = string.Empty;
    using (var stream = new StreamReader(file.InputStream))
    {
        fileContent = stream.ReadToEnd();
    }
    PassengerNameListResponse data;
    // NOTE(review): creating an HttpClient per request risks socket exhaustion
    // under load; consider a shared static instance or IHttpClientFactory.
    using (var client = new HttpClient())
    {
        using (var content = new MultipartFormDataContent("Upload----" + DateTime.Now.ToString(CultureInfo.InvariantCulture)))
        {
            var stream = new MemoryStream(Encoding.UTF8.GetBytes(fileContent));
            content.Add(new StreamContent(stream), "passengerFile", file.FileName);
            content.Headers.Add("APIKey", mydealKey);
            using (var message = await client.PostAsync(apiUrl + "/passengernamelist/UploadNameList", content))
            {
                var response = await message.Content.ReadAsStringAsync();
                data = JsonConvert.DeserializeObject<PassengerNameListResponse>(response);
            }
        }
    }
    var uploadFileId = 0;
    if (data != null)
    {
        using (var uow = new UnitOfWork(new Models.PassengerNameListDbContext()))
        {
            var fileInput = new UploadFile()
            {
                FileContent = fileContent,
                FileName = file.FileName
            };
            uow.UploadFileRepo.Add(fileInput);
            // Complete so the generated file Id is available for the records below.
            uow.Complete();
            uploadFileId = fileInput.Id;
            foreach (var item in data.Records)
            {
                var record = new RecordLocator();
                record.Code = item.RecordLocator;
                record.UploadFileId = fileInput.Id;
                uow.RecordLocatorRepo.Add(record);
                uow.Complete();
                foreach (var passenger in item.Passengers)
                {
                    var passengerModel = new Passenger();
                    passengerModel.Name = passenger;
                    passengerModel.RecordLocatorId = record.Id;
                    uow.PassengerRepo.Add(passengerModel);
                }
                uow.Complete();
            }
        }
    }
    var jsonResult = new JsonResult();
    var uploadResult = new UploadFileResult
    {
        FileId = uploadFileId,
        // Fixed: data can be null when the API response fails to deserialize;
        // the unconditional dereference here threw NullReferenceException.
        InputLineResults = data?.InputLineResults,
        UploadFileId = uploadFileId,
        UploadFileName = file.FileName
    };
    jsonResult.Data = uploadResult;
    return jsonResult;
}
/// <summary>
/// Copies a mod stream to the output with the records inside each base group
/// re-emitted in FormID ID order.  Two streams over the same data are used:
/// one to locate groups, one to do the actual copying.
/// </summary>
/// <param name="streamCreator">A func to create an input stream</param>
/// <param name="outputStream">Stream to write output to</param>
/// <param name="gameMode">Game mode the mod data belongs to</param>
public static void Sort(
    Func<Stream> streamCreator,
    Stream outputStream,
    GameMode gameMode)
{
    var meta = new ParsingBundle(GameConstants.Get(gameMode));
    using var inputStream = new MutagenBinaryReadStream(streamCreator(), meta);
    using var locatorStream = new MutagenBinaryReadStream(streamCreator(), meta);
    using var writer = new MutagenWriter(outputStream, gameMode, dispose: false);
    while (!inputStream.Complete)
    {
        long noRecordLength;
        foreach (var grupLoc in RecordLocator.IterateBaseGroupLocations(locatorStream))
        {
            // Copy any bytes between the current position and the next group verbatim.
            noRecordLength = grupLoc.Value - inputStream.Position;
            inputStream.WriteTo(writer.BaseStream, (int)noRecordLength);
            // If complete overall, return
            if (inputStream.Complete)
            {
                return;
            }
            var groupMeta = inputStream.GetGroup();
            if (!groupMeta.IsGroup)
            {
                // Locator promised a group here; anything else is malformed input.
                throw new ArgumentException();
            }
            // Buffers each record's raw bytes (plus any trailing subgroup) keyed by FormID.
            var storage = new Dictionary<FormID, List<ReadOnlyMemorySlice<byte>>>();
            using (var grupFrame = new MutagenFrame(inputStream).SpawnWithLength(groupMeta.TotalLength))
            {
                // Copy the group header through unchanged.
                inputStream.WriteTo(writer.BaseStream, meta.Constants.GroupConstants.HeaderLength);
                locatorStream.Position = grupLoc.Value;
                foreach (var rec in RecordLocator.ParseTopLevelGRUP(locatorStream))
                {
                    MajorRecordHeader majorMeta = inputStream.GetMajorRecord();
                    storage.TryCreateValue(rec.FormID).Add(inputStream.ReadMemory(checked((int)majorMeta.TotalLength), readSafe: true));
                    if (grupFrame.Complete)
                    {
                        continue;
                    }
                    // A record may be followed by a subgroup; keep it attached to the
                    // same FormID so it travels with its owning record when sorted.
                    GroupHeader subGroupMeta = inputStream.GetGroup();
                    if (subGroupMeta.IsGroup)
                    {
                        storage.TryCreateValue(rec.FormID).Add(inputStream.ReadMemory(checked((int)subGroupMeta.TotalLength), readSafe: true));
                    }
                }
            }
            // Re-emit records sorted by FormID ID.
            foreach (var item in storage.OrderBy((i) => i.Key.ID))
            {
                foreach (var bytes in item.Value)
                {
                    writer.Write(bytes);
                }
            }
        }
        // Copy whatever trails the final group.
        inputStream.WriteTo(writer.BaseStream, (int)inputStream.Remaining);
    }
}
/// <summary>
/// Runs the full processing pipeline over a preprocessed mod file: snapshots all
/// group/record lengths, runs the registered pre-processor and dynamic processor
/// jobs (the latter in parallel per record type), records any length changes as
/// byte substitutions, and finally applies all accumulated instructions to
/// produce the output file.
/// </summary>
/// <param name="tmpFolder">Temp folder for intermediate artifacts</param>
/// <param name="logging">Subject to push log messages into</param>
/// <param name="sourcePath">Original source mod path (used for master resolution)</param>
/// <param name="preprocessedPath">Path of the already-preprocessed input file</param>
/// <param name="outputPath">Path to write the final processed file to</param>
public async Task Process(
    TempFolder tmpFolder,
    Subject<string> logging,
    ModPath sourcePath,
    string preprocessedPath,
    string outputPath)
{
    this.Logging = logging;
    this.TempFolder = tmpFolder;
    this.SourcePath = sourcePath;
    this.Masters = MasterReferenceReader.FromPath(SourcePath, GameRelease);
    this.Bundle = new ParsingBundle(GameRelease, Masters);
    this._NumMasters = GetNumMasters();
    this._AlignedFileLocs = RecordLocator.GetFileLocations(new ModPath(ModKey, preprocessedPath), this.GameRelease);
    // Read the whole file once; every worker stream reads from this shared buffer.
    var preprocessedBytes = File.ReadAllBytes(preprocessedPath);
    IMutagenReadStream streamGetter() => new MutagenMemoryReadStream(preprocessedBytes, Bundle);
    using (var stream = streamGetter())
    {
        lock (_lengthTracker)
        {
            // Snapshot the current length field of every group and listed record.
            // +4 skips the 4-byte type tag to reach the length field — confirm
            // against the format constants.
            foreach (var grup in this._AlignedFileLocs.GrupLocations.And(this._AlignedFileLocs.ListedRecords.Keys))
            {
                stream.Position = grup + 4;
                this._lengthTracker[grup] = stream.ReadUInt32();
            }
        }
        await this.PreProcessorJobs(streamGetter);
        await Task.WhenAll(ExtraJobs(streamGetter));
        this.AddDynamicProcessorInstructions();
        // Run dynamic processors in parallel, one task per registered record type
        // (RecordType.Null included as a catch-all pass).
        Parallel.ForEach(this.DynamicProcessors.Keys
            .And(this.DynamicStreamProcessors.Keys)
            .And(RecordType.Null)
            .Distinct(), ParallelOptions, type => ProcessDynamicType(type, streamGetter));
        lock (_lengthTracker)
        {
            // Emit a substitution for every length the processors changed.
            foreach (var grup in this._lengthTracker)
            {
                stream.Position = grup.Key + 4;
                if (grup.Value == stream.ReadUInt32())
                {
                    continue;
                }
                this._Instructions.SetSubstitution(
                    loc: grup.Key + 4,
                    sub: BitConverter.GetBytes(grup.Value));
            }
        }
    }
    // Apply all accumulated instructions, streaming input -> output.
    var config = this._Instructions.GetConfig();
    using (var processor = new BinaryFileProcessor(
        new FileStream(preprocessedPath, FileMode.Open, FileAccess.Read), config))
    {
        try
        {
            using var outStream = new FileStream(outputPath, FileMode.Create, FileAccess.Write);
            processor.CopyTo(outStream);
        }
        catch (Exception)
        {
            // Don't leave a partially-written output file behind on failure.
            if (File.Exists(outputPath))
            {
                File.Delete(outputPath);
            }
            throw;
        }
    }
}
// _prevRecord - scan back to the previous record
// harder, since we have to scan backwards to find the start of the previous record
// TODO: write a reverse fast strchr() clone
//
// Strategy: read fixed-size windows moving backwards from just before the
// current record's start, scanning each window back-to-front for the
// END_OF_LINE delimiter; the byte after a delimiter is a record start.
// On entry rloc must describe a valid record; on exit it describes the
// preceding record, or have_record==false if we were already at the first one.
private void _prevRecord(ref RecordLocator rloc)
{
    byte[] search_buffer = new byte[PREV_RECORD_SCANSIZE];
    int read_result_size, read_size;
    if (!rloc.have_record)
    {
        throw new Exception("_prevRecord() called with rloc.have_record==false");
    }
    if (rloc.start_pos == 0)
    {
        // we can't go back before the beginning of our block, so if we reach it, we are done
        rloc.have_record = false;
        rloc.start_pos = 0;
        rloc.after_end_pos = 0;
        rloc.record = default(KeyValuePair<RecordKey, RecordUpdate>);
        return;
    }
    int check_up_to = rloc.start_pos - 1; // stop short, or we'll try to redecode the same record we were handed
    int cur_stream_pos = Math.Max(0, rloc.start_pos - search_buffer.Length);
    while (cur_stream_pos >= 0)
    {
        this.datastream.Seek(cur_stream_pos, SeekOrigin.Begin);
        read_size = (int)Math.Min(search_buffer.Length, check_up_to - cur_stream_pos);
        read_result_size = this.datastream.Read(search_buffer, 0, read_size);
        // Scan the window backwards for the most recent delimiter.
        int bufpos = read_result_size - 1;
        while (bufpos >= 0)
        {
            if (search_buffer[bufpos] == SegmentBlockBasicEncoder.END_OF_LINE)
            {
                // decode record (it starts on the byte after the delimiter)
                int new_record_start = cur_stream_pos + bufpos + 1;
                this.datastream.Seek(new_record_start, SeekOrigin.Begin);
                rloc.record = SegmentBlockBasicDecoder._decodeRecordFromBlock(this.datastream);
                rloc.after_end_pos = (int)this.datastream.Position;
                if (rloc.after_end_pos != rloc.start_pos)
                {
                    // if this decode didn't bring us to the start of the record we were
                    // ..handed, then something is WRONG!
                    throw new Exception("_prevRecord() INTERNAL scan error");
                }
                rloc.have_record = true;
                rloc.start_pos = new_record_start;
                return;
            }
            bufpos--;
        }
        // IF we got this far with cur_stream_pos == 0, we're at the start of the block!
        if (cur_stream_pos == 0)
        {
            // TODO: use a crafty state switch to avoid the block duplicated here from above
            // if we landed on the beginnng of the block, then decode the first record in the block
            int new_record_start = 0;
            this.datastream.Seek(new_record_start, SeekOrigin.Begin);
            rloc.record = SegmentBlockBasicDecoder._decodeRecordFromBlock(this.datastream);
            rloc.after_end_pos = (int)this.datastream.Position;
            if (rloc.after_end_pos != rloc.start_pos)
            {
                // if this decode didn't bring us to the start of the record we were
                // ..handed, then something is WRONG!
                throw new Exception("_prevRecord() INTERNAL scan error");
            }
            rloc.have_record = true;
            rloc.start_pos = new_record_start;
            return;
        }
        // backup to the next range
        cur_stream_pos = Math.Max(0, cur_stream_pos - search_buffer.Length);
    };
    // Unreachable in practice: the cur_stream_pos == 0 branch above always returns.
    throw new Exception("_prevRecord() INTERNAL scan error, dropout");
}
// _nextRecord() - grab the next record
// easy, since the last record end point stops right where we need to decode:
// the previous record's after_end_pos is exactly the next record's start.
private void _nextRecord(ref RecordLocator rloc)
{
    if (!rloc.have_record)
    {
        throw new Exception("_nextRecord() called with rloc.have_record==false");
    }

    int decode_start = rloc.after_end_pos;
    int stream_length = (int)this.datastream.Length;

    if (decode_start > stream_length)
    {
        // the locator claims a position beyond the data we hold
        throw new Exception("error: _nextRecord called with rloc past end");
    }

    if (decode_start == stream_length)
    {
        // exactly at the end: no further records remain
        rloc.have_record = false;
        rloc.record = default(KeyValuePair<RecordKey, RecordUpdate>);
        rloc.start_pos = 0;
        rloc.after_end_pos = 0;
        return;
    }

    // decode the record that begins where the previous one ended
    this.datastream.Seek(decode_start, SeekOrigin.Begin);
    rloc.start_pos = decode_start;
    rloc.record = SegmentBlockBasicDecoder._decodeRecordFromBlock(this.datastream);
    rloc.after_end_pos = (int)this.datastream.Position;
    rloc.have_record = true;
}