// Schedules a single async read for a one-element entry: resolves the entry's
// block-relative offset to an absolute file offset via the block's chunk table
// and issues one ReadCommand for it.
unsafe ReadHandle ScheduleSingleElementEntryReads(Entry entry, long readSize, void *dst)
{
    // Resolve the data block this entry lives in and its byte offset inside it.
    var dataBlock = m_Blocks[(int)entry.Header.BlockIndex];
    var offsetInBlock = entry.Header.HeaderMeta;
    var chunkLength = dataBlock.Header.ChunkSize;

    // Translate the block-relative offset into an absolute file offset:
    // pick the chunk the offset falls into, then add the remainder within it.
    uint chunkIdx = (uint)(offsetInBlock / chunkLength);
    var fileOffset = dataBlock.GetOffsetsPtr()[chunkIdx] + (uint)(offsetInBlock % chunkLength);

    var cmd = ReadCommandBufferUtils.GetCommand(dst, readSize, fileOffset);
    return(m_FileReader.Read(&cmd, 1, ReadMode.Async));
}
// Schedules async reads for a contiguous run of constant-size elements starting
// at firstElement. The run may span multiple chunks of the block; one ReadCommand
// is emitted per chunk touched so that no single command crosses a chunk boundary
// (chunks are not necessarily contiguous in the file — each has its own offset).
unsafe ReadHandle ScheduleConstSizeElementArrayRead(Entry entry, uint firstElement, long readSize, void *dst)
{
    var block = m_Blocks[(int)entry.Header.BlockIndex];
    // EntriesMeta holds the constant per-element size for this entry format —
    // presumably; confirm against the Entry format documentation.
    var blockOffset = entry.Header.EntriesMeta * firstElement;
    var chunkSize = block.Header.ChunkSize;
    var chunkIndex = (uint)(blockOffset / chunkSize);
    var chunk = block.GetOffsetsPtr() + chunkIndex;
    var chunkWithLocalOffset = *chunk + (uint)(blockOffset % chunkSize);
    byte *dstPtr = (byte *)dst;
    using (NativeBlockList <ReadCommand> chunkReads = new NativeBlockList <ReadCommand>(64, 64))
    {
        // BUGFIX: clamp the first read to the bytes remaining in its chunk.
        // Previously the full readSize was issued against the first chunk's file
        // offset, over-reading past the chunk boundary and overlapping the
        // destination ranges of the per-chunk reads scheduled below (async reads
        // give no ordering guarantee). This mirrors the per-chunk clamping done
        // in ProcessDynamicSizeElement.
        var remainingInFirstChunk = (long)(chunkSize - (blockOffset % chunkSize));
        var firstReadSize = Math.Min(readSize, remainingInFirstChunk);
        chunkReads.Push(ReadCommandBufferUtils.GetCommand(dstPtr, firstReadSize, chunkWithLocalOffset));
        dstPtr += firstReadSize;
        readSize -= firstReadSize;

        // One additional command per subsequent chunk the run spills into.
        while (readSize > 0)
        {
            ++chunkIndex;
            var chunkReadSize = Math.Min(readSize, (long)chunkSize);
            chunk = block.GetOffsetsPtr() + chunkIndex;
            chunkReads.Push(ReadCommandBufferUtils.GetCommand(dstPtr, chunkReadSize, *chunk));
            dstPtr += chunkReadSize;
            readSize -= chunkReadSize;
        }

        //TODO: find a way to use the block array chunks directly for scheduling, probably add readcommandbuffer
        using (var tempCmds = new NativeArray <ReadCommand>((int)chunkReads.Count, Allocator.TempJob))
        {
            ReadCommand *cmdPtr = (ReadCommand *)tempCmds.GetUnsafePtr();
            for (int i = 0; i < tempCmds.Length; ++i)
            {
                cmdPtr[i] = chunkReads[i];
            }
            // NOTE(review): tempCmds is disposed when this using block exits while
            // the async read may still be in flight; AsyncReadManager requires the
            // command buffer to remain valid until the read completes — verify
            // whether ReadHandle completion is awaited before this matters.
            return(m_FileReader.Read(cmdPtr, (uint)tempCmds.Length, ReadMode.Async));
        }
    }
}
// Returns amount of written (scheduled) bytes.
// Pushes the ReadCommands needed to load one dynamic-size element that may span
// several chunks of the block. The first read is clamped so it never crosses the
// first chunk's boundary; each following chunk gets its own command.
// NOTE(review): elementRead is passed by value and its readDst field is advanced
// locally — if ElementRead is a struct, the caller's copy is unaffected; confirm.
unsafe static long ProcessDynamicSizeElement(ref NativeBlockList <ReadCommand> chunkReads, ref Block block, ElementRead elementRead)
{
    long written = 0;
    var chunkSize = (long)block.Header.ChunkSize;
    var elementSize = elementRead.end - elementRead.start;

    // Locate the chunk containing the element's first byte.
    var chunkIndex = elementRead.start / chunkSize;
    var chunkOffset = block.GetOffsetsPtr()[chunkIndex];
    var elementOffsetInChunk = elementRead.start % chunkSize;
    var remainingChunksize = chunkSize - elementOffsetInChunk;

    // First read: whole element if it fits, otherwise up to the chunk's end.
    var rSize = Math.Min(chunkSize, elementSize);
    if (remainingChunksize != chunkSize)
    {
        chunkOffset += elementOffsetInChunk; //align the read to the element start within the chunk
        if (rSize > remainingChunksize)
        {
            rSize = remainingChunksize;
        }
    }
    chunkReads.Push(ReadCommandBufferUtils.GetCommand(elementRead.readDst, rSize, chunkOffset));
    elementRead.readDst += rSize;
    elementSize -= rSize;
    written += rSize;

    //if the element spans multiple chunks, schedule one read per subsequent chunk
    while (elementSize > 0)
    {
        chunkIndex++;
        chunkOffset = block.GetOffsetsPtr()[chunkIndex];
        rSize = Math.Min(chunkSize, elementSize);
        chunkReads.Push(ReadCommandBufferUtils.GetCommand(elementRead.readDst, rSize, chunkOffset));
        elementRead.readDst += rSize;
        elementSize -= rSize;
        written += rSize;
    }
    return(written);
}
// Reads all data-block headers, then each block's chunk-offset table, using two
// blocking batch reads. The ReadCommand buffer is filled, read, rewound and
// refilled for the second pass.
unsafe static ReadError BuildDataBlocks(LowLevelFileReader file, NativeArray <long> blockOffsets, out NativeArray <Block> blockStorage)
{
    // NOTE(review): Persistent allocation — the caller owns blockStorage and must
    // dispose it even when an error is returned from here.
    blockStorage = new NativeArray <Block>(blockOffsets.Length, Allocator.Persistent);
    using (var blockReads = new NativeArray <ReadCommand>(blockOffsets.Length, Allocator.Temp))
    {
        using (var headers = new NativeArray <BlockHeader>(blockOffsets.Length, Allocator.Temp))
        {
            BlockHeader *headerPtr = (BlockHeader *)headers.GetUnsafePtr();
            ReadCommand *commandPtr = (ReadCommand *)blockReads.GetUnsafePtr();
            // Pass 1: one read command per BlockHeader, at each block's file offset.
            for (int i = 0; i < blockStorage.Length; ++i)
            {
                var blockOffset = blockOffsets[i];
                * commandPtr++ = ReadCommandBufferUtils.GetCommand(headerPtr++, sizeof(BlockHeader), blockOffset);
            }
            // Rewind the command cursor so pass 2 can reuse the same buffer.
            commandPtr = (ReadCommand *)blockReads.GetUnsafePtr();
            if (file.Read((ReadCommand *)blockReads.GetUnsafePtr(), (uint)blockReads.Length, ReadMode.Blocking).Status != ReadStatus.Complete)
            {
                return(ReadError.FileReadFailed);
            }
            // Pass 2: with headers known, read each block's chunk-offset array,
            // which sits immediately after its BlockHeader in the file.
            for (int i = 0; i < blockStorage.Length; ++i)
            {
                var blockOffset = blockOffsets[i];
                var block = new Block(headers[i]);
                blockStorage[i] = block;
                // presumably Block's ctor allocates OffsetCount longs and
                // GetOffsetsPtr() returns that shared storage, so reading into the
                // local copy also fills blockStorage[i] — TODO confirm Block layout
                *commandPtr++ = ReadCommandBufferUtils.GetCommand(block.GetOffsetsPtr(), sizeof(long) * block.OffsetCount, blockOffset + sizeof(BlockHeader));
            }
            commandPtr = (ReadCommand *)blockReads.GetUnsafePtr();
            if (file.Read((ReadCommand *)blockReads.GetUnsafePtr(), (uint)blockReads.Length, ReadMode.Blocking).Status != ReadStatus.Complete)
            {
                return(ReadError.FileReadFailed);
            }
        }
    }
    return(ReadError.Success);
}
// Reads every entry chapter's header and, for dynamic-size-element entries, the
// per-element offset table that follows the header in the file. A final in-place
// fix-up pass rearranges each dynamic entry's offset table (see below).
unsafe static ReadError BuildDataEntries(LowLevelFileReader file, NativeArray <long> entryTypeOffsets, out NativeArray <Entry> entryStorage)
{
    // NOTE(review): Persistent allocation — the caller owns entryStorage and must
    // dispose it even when an error is returned from here.
    entryStorage = new NativeArray <Entry>(entryTypeOffsets.Length, Allocator.Persistent);
    using (var headers = new NativeArray <EntryHeader>(entryTypeOffsets.Length, Allocator.TempJob, NativeArrayOptions.ClearMemory))
    {
        uint writtenCommands = 0;
        EntryHeader *headersPtr = (EntryHeader *)headers.GetUnsafePtr();
        using (var readCommands = new NativeArray <ReadCommand>(entryTypeOffsets.Length, Allocator.TempJob))
        {
            ReadCommand *readCommandsPtr = (ReadCommand *)readCommands.GetUnsafePtr();
            // Pass 1: schedule a header read for every chapter that is present.
            // An offset of 0 means the chapter is absent; its header stays
            // zeroed (ClearMemory above).
            for (int i = 0; i < entryTypeOffsets.Length; ++i)
            {
                var offset = entryTypeOffsets[i];
                if (offset != 0)
                {
                    readCommandsPtr[writtenCommands++] = ReadCommandBufferUtils.GetCommand((headersPtr + i), sizeof(EntryHeader), offset);
                }
            }
            if (file.Read(readCommandsPtr, writtenCommands, ReadMode.Blocking).Status != ReadStatus.Complete)
            {
                return(ReadError.FileReadFailed);
            }

            // Pass 2: build the Entry objects and, for dynamic-size-element
            // entries, read the per-element offsets stored right after the header.
            writtenCommands = 0;
            for (int i = 0; i < headers.Length; ++i)
            {
                var entry = new Entry(*(headersPtr + i));
                entryStorage[i] = entry;
                // Cleanup: the original reinterpret-cast a copy of the entry to
                // EntryHeader* just to read Format; the Header property is
                // accessible directly.
                if (entry.Header.Format == EntryFormat.DynamicSizeElementArray)
                {
                    readCommandsPtr[writtenCommands++] = ReadCommandBufferUtils.GetCommand(entry.GetAdditionalStoragePtr(), sizeof(long) * entry.Count, entryTypeOffsets[i] + sizeof(EntryHeader));
                }
            }
            if (file.Read(readCommandsPtr, writtenCommands, ReadMode.Blocking).Status != ReadStatus.Complete)
            {
                return(ReadError.FileReadFailed);
            }
        }
    }

    // Fix-up pass for dynamic-size entries: swap back the first entry we read
    // during the header read with the total size at the end of the entries array,
    // memmoving the array one slot to the right to make space for the first
    // entry. This keeps the total size in the header so size computations don't
    // have to jump to the end of the array and back (cache friendliness).
    var entriesBegin = (Entry *)entryStorage.GetUnsafePtr();
    var entriesEnd = entriesBegin + entryStorage.Length;
    while (entriesBegin != entriesEnd)
    {
        if (entriesBegin->Header.Format == EntryFormat.DynamicSizeElementArray)
        {
            long *storagePtr = entriesBegin->GetAdditionalStoragePtr();
            // NOTE(review): assumes the header layout inside Entry is
            // [EntryFormat][uint][uint][long meta] — confirm against EntryHeader.
            long *headerMetaPtr = (long *)((byte *)entriesBegin + sizeof(EntryFormat) + sizeof(uint) * 2);
            long totalSize = storagePtr[entriesBegin->Count - 1];
            UnsafeUtility.MemMove(storagePtr + 1, storagePtr, sizeof(long) * (entriesBegin->Count - 1));
            *storagePtr = *headerMetaPtr;
            *headerMetaPtr = totalSize;
        }
        ++entriesBegin;
    }
    return(ReadError.Success);
}
// Validates the file's header/footer signatures and chapter directory, then
// returns (packed in a Blob16Byte) the file offsets of the entries section and
// the blocks section. The same stackalloc'd signature and command buffers are
// reused across three blocking read batches.
unsafe static ReadError TryGetBlockEntriesOffsetsWithIntegrityChecks(LowLevelFileReader file, out Blob16Byte blockEntriesOffsets)
{
    const int readCommandCount = 3;
    Blob16Byte offsets;
    FormatSignature *sig = stackalloc FormatSignature[2];
    ReadCommand * readCommands = stackalloc ReadCommand[readCommandCount];
    //read first chapter offset
    long _8ByteBuffer = -1;
    //read header sig
    readCommands[0] = ReadCommandBufferUtils.GetCommand(sig, sizeof(uint), 0);
    //read tail sig
    readCommands[1] = ReadCommandBufferUtils.GetCommand(sig + 1, sizeof(uint), file.FileLength - sizeof(uint));
    //read chapters start offset (the 8 bytes immediately before the footer signature)
    readCommands[2] = ReadCommandBufferUtils.GetCommand(&_8ByteBuffer, sizeof(ulong), readCommands[1].Offset - sizeof(ulong));
    if (file.Read(readCommands, readCommandCount, ReadMode.Blocking).Status != ReadStatus.Complete)
    {
        return(ReadError.FileReadFailed);
    }
    if (*sig != FormatSignature.HeaderSignature)
    {
        return(ReadError.InvalidHeaderSignature);
    }
    if (*(sig + 1) != FormatSignature.FooterSignature)
    {
        return(ReadError.InvalidFooterSignature);
    }
    // The chapter offset must point inside the file (and past the header).
    if (!(_8ByteBuffer < file.FileLength && _8ByteBuffer > 0))
    {
        return(ReadError.InvalidChapterLocation);
    }
    //read directory signature
    readCommands[0] = ReadCommandBufferUtils.GetCommand(sig, sizeof(uint), _8ByteBuffer);
    //read chapter version
    readCommands[1] = ReadCommandBufferUtils.GetCommand(sig + 1, sizeof(uint), _8ByteBuffer + sizeof(uint));
    //read blocks offset (overwrites _8ByteBuffer with the blocks section offset)
    readCommands[2] = ReadCommandBufferUtils.GetCommand(&_8ByteBuffer, sizeof(ulong), readCommands[1].Offset + sizeof(uint));
    if (file.Read(readCommands, 3, ReadMode.Blocking).Status != ReadStatus.Complete)
    {
        return(ReadError.FileReadFailed);
    }
    if (*sig != FormatSignature.DirectorySignature)
    {
        return(ReadError.InvalidDirectorySignature);
    }
    if (*(sig + 1) != FormatSignature.ChapterSectionVersion)
    {
        return(ReadError.InvalidChapterSectionVersion);
    }
    //computed offset in file for entries: right after the 8-byte blocks offset field
    var tmpEntriesOffset = readCommands[2].Offset + sizeof(ulong);
    // Third batch: validate the block section's version signature at the blocks offset.
    readCommands[0] = ReadCommandBufferUtils.GetCommand(sig, sizeof(uint), _8ByteBuffer);
    if (file.Read(readCommands, 1, ReadMode.Blocking).Status != ReadStatus.Complete)
    {
        return(ReadError.FileReadFailed);
    }
    if (*sig != FormatSignature.BlockSectionVersion)
    {
        return(ReadError.InvalidBlockSectionVersion);
    }
    // Pack the two results: first 8 bytes = entries offset, next 8 bytes = blocks
    // offset (skipping the 4-byte version signature just validated).
    long *dataPtr = (long *)(&offsets);
    *dataPtr++ = tmpEntriesOffset;
    *dataPtr = _8ByteBuffer + sizeof(uint);
    blockEntriesOffsets = offsets;
    return(ReadError.Success);
}
// Opens the snapshot file: validates its structure, then loads the block and
// entry directories into m_Blocks / m_Entries. Returns ReadError.Success or the
// first error encountered.
unsafe ReadError InternalOpen(string filePath)
{
    Dispose();
    m_FileReader = new LowLevelFileReader(filePath);
    //first 8byte are entriesCount offset, the next are blockCount offset
    Blob16Byte fileOffsets;
    var error = TryGetBlockEntriesOffsetsWithIntegrityChecks(m_FileReader, out fileOffsets);
    if (error != ReadError.Success)
    {
        return(error);
    }
    long * fileOffsetsPtr = (long *)(&fileOffsets);
    int * counts = stackalloc int[2];
    ReadCommand *commands = stackalloc ReadCommand[2];
    //read entry offset count
    commands[0] = ReadCommandBufferUtils.GetCommand(counts, sizeof(int), *fileOffsetsPtr);
    //read block offset count
    commands[1] = ReadCommandBufferUtils.GetCommand(counts + 1, sizeof(int), *(fileOffsetsPtr + 1));
    if (m_FileReader.Read(commands, 2, ReadMode.Blocking).Status != ReadStatus.Complete)
    {
        return(ReadError.FileReadFailed);
    }
    if (*(counts + 1) < 1)
    {
        return(ReadError.InvalidBlockSectionCount);
    }
    // Newer files may list more entry types than this reader knows; clamp so we
    // only read the offsets we understand.
    if (*counts > (int)EntryType.Count)
    {
        *counts = (int)EntryType.Count;
    }
    var entryTypeToChapterOffset = new NativeArray <long>(counts[0], Allocator.TempJob, NativeArrayOptions.ClearMemory);
    var dataBlockOffsets = new NativeArray <long>(counts[1], Allocator.TempJob);
    // BUGFIX: the two TempJob arrays above leaked on the early FileReadFailed
    // return below; try/finally now guarantees disposal on every path.
    try
    {
        //read entry offsets (the long array right after the 4-byte count)
        commands[0] = ReadCommandBufferUtils.GetCommand(entryTypeToChapterOffset.GetUnsafePtr(), sizeof(long) * counts[0], commands[0].Offset + sizeof(int));
        //read block offsets (the long array right after the 4-byte count)
        commands[1] = ReadCommandBufferUtils.GetCommand(dataBlockOffsets.GetUnsafePtr(), sizeof(long) * counts[1], commands[1].Offset + sizeof(int));
        if (m_FileReader.Read(commands, 2, ReadMode.Blocking).Status != ReadStatus.Complete)
        {
            return(ReadError.FileReadFailed);
        }
        error = BuildDataBlocks(m_FileReader, dataBlockOffsets, out m_Blocks);
        if (error != ReadError.Success)
        {
            return(error);
        }
        error = BuildDataEntries(m_FileReader, entryTypeToChapterOffset, out m_Entries);
        return(error);
    }
    finally
    {
        //dispose of the offset arrays as they are no longer needed
        dataBlockOffsets.Dispose();
        entryTypeToChapterOffset.Dispose();
    }
}